From 35db5066e77b1cdf2c5c520618c404aee9cb30bf Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 14 Jan 2026 02:30:43 +0100 Subject: [PATCH 1/8] Add since_version to bundle schema --- bundle/internal/annotation/descriptor.go | 1 + bundle/internal/annotation/file.go | 70 + bundle/internal/annotation/file_test.go | 123 + .../schema/.last_processed_cli_version | 1 + bundle/internal/schema/README.md | 26 + bundle/internal/schema/annotations.go | 54 +- bundle/internal/schema/annotations.yml | 844 +++++ .../schema/annotations_openapi_overrides.yml | 1989 +++++++++++ bundle/internal/schema/main.go | 5 + bundle/internal/schema/parser.go | 6 +- bundle/internal/schema/since_versions.go | 387 +++ bundle/internal/schema/since_versions_test.go | 302 ++ bundle/schema/jsonschema.json | 2934 +++++++++++------ libs/jsonschema/extension.go | 3 + 14 files changed, 5722 insertions(+), 1023 deletions(-) create mode 100644 bundle/internal/annotation/file_test.go create mode 100644 bundle/internal/schema/.last_processed_cli_version create mode 100644 bundle/internal/schema/README.md create mode 100644 bundle/internal/schema/since_versions.go create mode 100644 bundle/internal/schema/since_versions_test.go diff --git a/bundle/internal/annotation/descriptor.go b/bundle/internal/annotation/descriptor.go index 797746b0df..832cc80646 100644 --- a/bundle/internal/annotation/descriptor.go +++ b/bundle/internal/annotation/descriptor.go @@ -10,6 +10,7 @@ type Descriptor struct { DeprecationMessage string `json:"deprecation_message,omitempty"` Preview string `json:"x-databricks-preview,omitempty"` OutputOnly *bool `json:"x-databricks-field-behaviors_output_only,omitempty"` + SinceVersion string `json:"since_version,omitempty"` } const Placeholder = "PLACEHOLDER" diff --git a/bundle/internal/annotation/file.go b/bundle/internal/annotation/file.go index 0317f441a0..501b30a463 100644 --- a/bundle/internal/annotation/file.go +++ b/bundle/internal/annotation/file.go @@ -3,11 +3,15 @@ package annotation import ( "bytes" "os" + "slices" + + yaml3 "gopkg.in/yaml.v3" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/databricks/cli/libs/dyn/yamlsaver" ) // Parsed file with annotations, expected format: @@ -17,6 +21,31 @@ import ( // description: "Description" type File map[string]map[string]Descriptor +// Load loads annotations from a single file. +func Load(path string) (File, error) { + b, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return make(File), nil + } + return nil, err + } + + dynVal, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b)) + if err != nil { + return nil, err + } + + var data File + if err := convert.ToTyped(&data, dynVal); err != nil { + return nil, err + } + if data == nil { + data = make(File) + } + return data, nil +} + func LoadAndMerge(sources []string) (File, error) { prev := dyn.NilValue for _, path := range sources { @@ -42,3 +71,44 @@ func LoadAndMerge(sources []string) (File, error) { } return data, nil } + +// Save saves the annotation file to the given path. 
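+// Types and fields are written in alphabetical order, and descriptor keys are
+// emitted in a fixed order (description, markdown_description, title, default,
+// enum, since_version), so regenerated output is deterministic.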
+func (f File) Save(path string) error { + annotationOrder := yamlsaver.NewOrder([]string{"description", "markdown_description", "title", "default", "enum", "since_version"}) + style := map[string]yaml3.Style{} + + order := alphabeticalOrder(f) + dynMap := map[string]dyn.Value{} + for k, v := range f { + style[k] = yaml3.LiteralStyle + + properties := map[string]dyn.Value{} + propertiesOrder := alphabeticalOrder(v) + for key, value := range v { + d, err := convert.FromTyped(value, dyn.NilValue) + if d.Kind() == dyn.KindNil || err != nil { + properties[key] = dyn.NewValue(map[string]dyn.Value{}, []dyn.Location{{Line: propertiesOrder.Get(key)}}) + continue + } + val, err := yamlsaver.ConvertToMapValue(value, annotationOrder, []string{}, map[string]dyn.Value{}) + if err != nil { + return err + } + properties[key] = val.WithLocations([]dyn.Location{{Line: propertiesOrder.Get(key)}}) + } + + dynMap[k] = dyn.NewValue(properties, []dyn.Location{{Line: order.Get(k)}}) + } + + saver := yamlsaver.NewSaverWithStyle(style) + return saver.SaveAsYAML(dynMap, path, true) +} + +func alphabeticalOrder[T any](mapping map[string]T) *yamlsaver.Order { + var order []string + for k := range mapping { + order = append(order, k) + } + slices.Sort(order) + return yamlsaver.NewOrder(order) +} diff --git a/bundle/internal/annotation/file_test.go b/bundle/internal/annotation/file_test.go new file mode 100644 index 0000000000..6fda9fefda --- /dev/null +++ b/bundle/internal/annotation/file_test.go @@ -0,0 +1,123 @@ +package annotation + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLoad(t *testing.T) { + t.Run("loads valid file", func(t *testing.T) { + dir := t.TempDir() + path := filepath.Join(dir, "annotations.yml") + content := `github.com/databricks/cli/bundle/config.Bundle: + name: + description: The bundle name + since_version: v0.228.0 + cluster_id: + description: The cluster ID +` + err := os.WriteFile(path, []byte(content), 0o644) + require.NoError(t, err) + + file, err := Load(path) + require.NoError(t, err) + + assert.Equal(t, "The bundle name", file["github.com/databricks/cli/bundle/config.Bundle"]["name"].Description) + assert.Equal(t, "v0.228.0", file["github.com/databricks/cli/bundle/config.Bundle"]["name"].SinceVersion) + assert.Equal(t, "The cluster ID", file["github.com/databricks/cli/bundle/config.Bundle"]["cluster_id"].Description) + }) + + t.Run("returns empty file for nonexistent path", func(t *testing.T) { + file, err := Load("/nonexistent/path/annotations.yml") + require.NoError(t, err) + assert.Empty(t, file) + }) + + t.Run("returns empty file for empty content", func(t *testing.T) { + dir := t.TempDir() + path := filepath.Join(dir, "empty.yml") + err := os.WriteFile(path, []byte(""), 0o644) + require.NoError(t, err) + + file, err := Load(path) + require.NoError(t, err) + assert.Empty(t, file) + }) + + t.Run("returns error for invalid yaml", func(t *testing.T) { + dir := t.TempDir() + path := filepath.Join(dir, "invalid.yml") + err := os.WriteFile(path, []byte("not: valid: yaml: content"), 0o644) + require.NoError(t, err) + + _, err = Load(path) + assert.Error(t, err) + }) +} + +func TestSave(t *testing.T) { + t.Run("saves file with annotations", func(t *testing.T) { + dir := t.TempDir() + path := filepath.Join(dir, "annotations.yml") + + file := File{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": Descriptor{ + Description: "The bundle name", + SinceVersion: "v0.228.0", + }, + 
"cluster_id": Descriptor{ + Description: "The cluster ID", + }, + }, + } + + err := file.Save(path) + require.NoError(t, err) + + // Verify by loading it back + loaded, err := Load(path) + require.NoError(t, err) + + assert.Equal(t, "The bundle name", loaded["github.com/databricks/cli/bundle/config.Bundle"]["name"].Description) + assert.Equal(t, "v0.228.0", loaded["github.com/databricks/cli/bundle/config.Bundle"]["name"].SinceVersion) + assert.Equal(t, "The cluster ID", loaded["github.com/databricks/cli/bundle/config.Bundle"]["cluster_id"].Description) + }) + + t.Run("sorts types alphabetically", func(t *testing.T) { + dir := t.TempDir() + path := filepath.Join(dir, "annotations.yml") + + file := File{ + "z_type": {"field": Descriptor{Description: "z"}}, + "a_type": {"field": Descriptor{Description: "a"}}, + "m_type": {"field": Descriptor{Description: "m"}}, + } + + err := file.Save(path) + require.NoError(t, err) + + content, err := os.ReadFile(path) + require.NoError(t, err) + + aIdx := indexOf(string(content), "a_type:") + mIdx := indexOf(string(content), "m_type:") + zIdx := indexOf(string(content), "z_type:") + + assert.Less(t, aIdx, mIdx, "a_type should come before m_type") + assert.Less(t, mIdx, zIdx, "m_type should come before z_type") + }) +} + +func indexOf(s, substr string) int { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return i + } + } + return -1 +} diff --git a/bundle/internal/schema/.last_processed_cli_version b/bundle/internal/schema/.last_processed_cli_version new file mode 100644 index 0000000000..6a2502f283 --- /dev/null +++ b/bundle/internal/schema/.last_processed_cli_version @@ -0,0 +1 @@ +v0.281.0 diff --git a/bundle/internal/schema/README.md b/bundle/internal/schema/README.md new file mode 100644 index 0000000000..2aa5258452 --- /dev/null +++ b/bundle/internal/schema/README.md @@ -0,0 +1,26 @@ +# Bundle Schema Generator + +This package generates the JSON schema for Databricks Asset Bundles configuration. + +## Annotation Files + +The schema generator uses three YAML files to add descriptions and metadata to the generated schema: + +- **annotations_openapi.yml**: Auto-generated from the Databricks OpenAPI spec. Contains descriptions for SDK types (jobs, pipelines, etc.). Do not edit manually. + +- **annotations_openapi_overrides.yml**: Manual overrides for OpenAPI annotations. Use this to fix or enhance descriptions from the OpenAPI spec without modifying the auto-generated file. + +- **annotations.yml**: Manual annotations for CLI-specific types (e.g., `bundle`, `workspace`, `artifacts`). Missing annotations are automatically added with `PLACEHOLDER` descriptions. + +## Annotation Priority + +Files are merged in order, with later files taking precedence: +1. `annotations_openapi.yml` (base) +2. `annotations_openapi_overrides.yml` (overrides OpenAPI) +3. `annotations.yml` (CLI-specific, highest priority) + +## Usage + +Run `make schema` from the repository root to regenerate the bundle JSON schema. + +To update OpenAPI annotations, set `DATABRICKS_OPENAPI_SPEC` to the path of the OpenAPI spec file before running. 
diff --git a/bundle/internal/schema/annotations.go b/bundle/internal/schema/annotations.go index 787304b077..7dfb89b439 100644 --- a/bundle/internal/schema/annotations.go +++ b/bundle/internal/schema/annotations.go @@ -6,17 +6,13 @@ import ( "os" "reflect" "regexp" - "slices" "strings" - yaml3 "gopkg.in/yaml.v3" - "github.com/databricks/cli/bundle/internal/annotation" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/yamlloader" - "github.com/databricks/cli/libs/dyn/yamlsaver" "github.com/databricks/cli/libs/jsonschema" ) @@ -108,11 +104,7 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error return err } - err = saveYamlWithStyle(outputPath, outputTyped) - if err != nil { - return err - } - return nil + return outputTyped.Save(outputPath) } func getPath(typ reflect.Type) string { @@ -145,50 +137,10 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) { s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription) s.Title = a.Title s.Enum = a.Enum -} - -func saveYamlWithStyle(outputPath string, annotations annotation.File) error { - annotationOrder := yamlsaver.NewOrder([]string{"description", "markdown_description", "title", "default", "enum"}) - style := map[string]yaml3.Style{} - - order := getAlphabeticalOrder(annotations) - dynMap := map[string]dyn.Value{} - for k, v := range annotations { - style[k] = yaml3.LiteralStyle - - properties := map[string]dyn.Value{} - propertiesOrder := getAlphabeticalOrder(v) - for key, value := range v { - d, err := convert.FromTyped(value, dyn.NilValue) - if d.Kind() == dyn.KindNil || err != nil { - properties[key] = dyn.NewValue(map[string]dyn.Value{}, []dyn.Location{{Line: propertiesOrder.Get(key)}}) - continue - } - val, err := yamlsaver.ConvertToMapValue(value, annotationOrder, []string{}, map[string]dyn.Value{}) - if err != nil { - return err - } - properties[key] = val.WithLocations([]dyn.Location{{Line: propertiesOrder.Get(key)}}) - } - - dynMap[k] = dyn.NewValue(properties, []dyn.Location{{Line: order.Get(k)}}) - } - - saver := yamlsaver.NewSaverWithStyle(style) - err := saver.SaveAsYAML(dynMap, outputPath, true) - if err != nil { - return err - } - return nil -} -func getAlphabeticalOrder[T any](mapping map[string]T) *yamlsaver.Order { - var order []string - for k := range mapping { - order = append(order, k) + if a.SinceVersion != "" { + s.SinceVersion = a.SinceVersion } - slices.Sort(order) - return yamlsaver.NewOrder(order) } func convertLinksToAbsoluteUrl(s string) string { diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 2a9c78b4ac..c033a1c93b 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -2,133 +2,203 @@ github.com/databricks/cli/bundle/config.Artifact: "build": "description": |- An optional set of build commands to run locally before deployment. + "since_version": |- + v0.228.1 "dynamic_version": "description": |- Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts). + "since_version": |- + v0.245.0 "executable": "description": |- The executable type. 
Valid values are `bash`, `sh`, and `cmd`. + "since_version": |- + v0.228.1 "files": "description": |- The relative or absolute path to the built artifact files. + "since_version": |- + v0.228.1 "path": "description": |- The local path of the directory for the artifact. + "since_version": |- + v0.228.1 "type": "description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. "markdown_description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.ArtifactFile: "source": "description": |- Required. The artifact source file. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Bundle: "cluster_id": "description": |- The ID of a cluster to use to run the bundle. "markdown_description": |- The ID of a cluster to use to run the bundle. See [\_](/dev-tools/bundles/settings.md#cluster_id). + "since_version": |- + v0.229.0 "compute_id": "description": |- Deprecated. The ID of the compute to use to run the bundle. + "since_version": |- + v0.228.1 "databricks_cli_version": "description": |- The Databricks CLI version to use for the bundle. "markdown_description": |- The Databricks CLI version to use for the bundle. See [\_](/dev-tools/bundles/settings.md#databricks_cli_version). + "since_version": |- + v0.228.1 "deployment": "description": |- The definition of the bundle deployment "markdown_description": |- The definition of the bundle deployment. For supported attributes see [\_](/dev-tools/bundles/deployment-modes.md). + "since_version": |- + v0.228.1 "git": "description": |- The Git version control details that are associated with your bundle. "markdown_description": |- The Git version control details that are associated with your bundle. For supported attributes see [\_](/dev-tools/bundles/settings.md#git). + "since_version": |- + v0.228.1 "name": "description": |- The name of the bundle. + "since_version": |- + v0.228.1 "uuid": "description": |- Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command). + "since_version": |- + v0.236.0 github.com/databricks/cli/bundle/config.Deployment: "fail_on_active_runs": "description": |- Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted. + "since_version": |- + v0.228.1 "lock": "description": |- The deployment lock attributes. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Experimental: "pydabs": "description": |- The PyDABs configuration. + "since_version": |- + v0.228.1 "deprecation_message": |- Deprecated: please use python instead "python": "description": |- Configures loading of Python code defined with 'databricks-bundles' package. + "since_version": |- + v0.238.0 "python_wheel_wrapper": "description": |- Whether to use a Python wheel wrapper. + "since_version": |- + v0.228.1 "scripts": "description": |- The commands to run. 
+ "since_version": |- + v0.228.1 "skip_artifact_cleanup": "description": |- Determines whether to skip cleaning up the .internal folder + "since_version": |- + v0.254.0 "skip_name_prefix_for_schema": "description": |- Skip adding the prefix that is either set in `presets.name_prefix` or computed when `mode: development` is set, to the names of UC schemas defined in the bundle. + "since_version": |- + v0.255.0 "use_legacy_run_as": "description": |- Whether to use the legacy run_as behavior. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Git: "branch": "description": |- The Git branch name. "markdown_description": |- The Git branch name. See [\_](/dev-tools/bundles/settings.md#git). + "since_version": |- + v0.228.1 "origin_url": "description": |- The origin URL of the repository. "markdown_description": |- The origin URL of the repository. See [\_](/dev-tools/bundles/settings.md#git). + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Lock: "enabled": "description": |- Whether this lock is enabled. + "since_version": |- + v0.228.1 "force": "description": |- Whether to force this lock if it is enabled. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Presets: "artifacts_dynamic_version": "description": |- Whether to enable dynamic_version on all artifacts. + "since_version": |- + v0.256.0 "jobs_max_concurrent_runs": "description": |- The maximum concurrent runs for a job. + "since_version": |- + v0.228.1 "name_prefix": "description": |- The prefix for job runs of the bundle. + "since_version": |- + v0.228.1 "pipelines_development": "description": |- Whether pipeline deployments should be locked in development mode. + "since_version": |- + v0.228.1 "source_linked_deployment": "description": |- Whether to link the deployment to the bundle source. + "since_version": |- + v0.236.0 "tags": "description": |- The tags for the bundle deployment. + "since_version": |- + v0.228.1 "trigger_pause_status": "description": |- A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.PyDABs: "enabled": "description": |- Whether or not PyDABs (Private Preview) is enabled + "since_version": |- + v0.228.1 "import": "description": |- The PyDABs project to import to discover resources, resource generator and mutators @@ -141,101 +211,143 @@ github.com/databricks/cli/bundle/config.Python: Mutators contains a list of fully qualified function paths to mutator functions. Example: ["my_project.mutators:add_default_cluster"] + "since_version": |- + v0.238.0 "resources": "description": |- Resources contains a list of fully qualified function paths to load resources defined in Python code. Example: ["my_project.resources:load_resources"] + "since_version": |- + v0.238.0 "venv_path": "description": |- VEnvPath is path to the virtual environment. If enabled, Python code will execute within this environment. If disabled, it defaults to using the Python interpreter available in the current shell. + "since_version": |- + v0.238.0 github.com/databricks/cli/bundle/config.Resources: "alerts": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "apps": "description": |- The app resource defines a Databricks app. "markdown_description": |- The app resource defines a [Databricks app](/api/workspace/apps/create). For information about Databricks Apps, see [\_](/dev-tools/databricks-apps/index.md). 
+ "since_version": |- + v0.239.0 "clusters": "description": |- The cluster definitions for the bundle, where each key is the name of a cluster. "markdown_description": |- The cluster definitions for the bundle, where each key is the name of a cluster. See [\_](/dev-tools/bundles/resources.md#clusters). + "since_version": |- + v0.229.0 "dashboards": "description": |- The dashboard definitions for the bundle, where each key is the name of the dashboard. "markdown_description": |- The dashboard definitions for the bundle, where each key is the name of the dashboard. See [\_](/dev-tools/bundles/resources.md#dashboards). + "since_version": |- + v0.232.0 "database_catalogs": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 "database_instances": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 "experiments": "description": |- The experiment definitions for the bundle, where each key is the name of the experiment. "markdown_description": |- The experiment definitions for the bundle, where each key is the name of the experiment. See [\_](/dev-tools/bundles/resources.md#experiments). + "since_version": |- + v0.228.1 "jobs": "description": |- The job definitions for the bundle, where each key is the name of the job. "markdown_description": |- The job definitions for the bundle, where each key is the name of the job. See [\_](/dev-tools/bundles/resources.md#jobs). + "since_version": |- + v0.228.1 "model_serving_endpoints": "description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. "markdown_description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [\_](/dev-tools/bundles/resources.md#model_serving_endpoints). + "since_version": |- + v0.228.1 "models": "description": |- The model definitions for the bundle, where each key is the name of the model. "markdown_description": |- The model definitions for the bundle, where each key is the name of the model. See [\_](/dev-tools/bundles/resources.md#models). + "since_version": |- + v0.228.1 "pipelines": "description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. "markdown_description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. See [\_](/dev-tools/bundles/resources.md#pipelines). + "since_version": |- + v0.228.1 "quality_monitors": "description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. "markdown_description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [\_](/dev-tools/bundles/resources.md#quality_monitors). + "since_version": |- + v0.228.1 "registered_models": "description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. "markdown_description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [\_](/dev-tools/bundles/resources.md#registered_models) + "since_version": |- + v0.228.1 "schemas": "description": |- The schema definitions for the bundle, where each key is the name of the schema. "markdown_description": |- The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). 
+ "since_version": |- + v0.228.1 "secret_scopes": "description": |- The secret scope definitions for the bundle, where each key is the name of the secret scope. "markdown_description": |- The secret scope definitions for the bundle, where each key is the name of the secret scope. See [\_](/dev-tools/bundles/resources.md#secret_scopes). + "since_version": |- + v0.252.0 "sql_warehouses": "description": |- The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. "markdown_description": |- The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. See [\_](/dev-tools/bundles/resources.md#sql_warehouses). + "since_version": |- + v0.260.0 "synced_database_tables": "description": |- PLACEHOLDER + "since_version": |- + v0.266.0 "volumes": "description": |- The volume definitions for the bundle, where each key is the name of the volume. "markdown_description": |- The volume definitions for the bundle, where each key is the name of the volume. See [\_](/dev-tools/bundles/resources.md#volumes). + "since_version": |- + v0.236.0 github.com/databricks/cli/bundle/config.Root: "artifacts": "description": |- @@ -244,6 +356,8 @@ github.com/databricks/cli/bundle/config.Root: Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [\_](/dev-tools/bundles/settings.md#artifacts). Artifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [\_](/dev-tools/bundles/artifact-overrides.md). + "since_version": |- + v0.228.1 "markdown_examples": |- ```yaml artifacts: @@ -257,19 +371,27 @@ github.com/databricks/cli/bundle/config.Root: The bundle attributes when deploying to this target. "markdown_description": |- The bundle attributes when deploying to this target, + "since_version": |- + v0.228.1 "environments": "description": |- PLACEHOLDER + "since_version": |- + v0.243.0 "deprecation_message": |- Deprecated: please use targets instead "experimental": "description": |- Defines attributes for experimental features. + "since_version": |- + v0.228.1 "include": "description": |- Specifies a list of path globs that contain configuration files to include within the bundle. "markdown_description": |- Specifies a list of path globs that contain configuration files to include within the bundle. See [\_](/dev-tools/bundles/settings.md#include). + "since_version": |- + v0.228.1 "permissions": "description": |- Defines a permission for a specific entity. @@ -277,6 +399,8 @@ github.com/databricks/cli/bundle/config.Root: A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity. See [\_](/dev-tools/bundles/settings.md#permissions) and [\_](/dev-tools/bundles/permissions.md). + "since_version": |- + v0.228.1 "markdown_examples": |- ```yaml permissions: @@ -292,9 +416,13 @@ github.com/databricks/cli/bundle/config.Root: Defines bundle deployment presets. "markdown_description": |- Defines bundle deployment presets. See [\_](/dev-tools/bundles/deployment-modes.md#presets). 
+ "since_version": |- + v0.228.1 "python": "description": |- PLACEHOLDER + "since_version": |- + v0.275.0 "resources": "description": |- A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. @@ -307,144 +435,226 @@ github.com/databricks/cli/bundle/config.Root: : : ``` + "since_version": |- + v0.228.1 "run_as": "description": |- The identity to use when running Databricks Asset Bundles workflows. "markdown_description": |- The identity to use when running Databricks Asset Bundles workflows. See [\_](/dev-tools/bundles/run-as.md). + "since_version": |- + v0.228.1 "scripts": "description": |- PLACEHOLDER + "since_version": |- + v0.259.0 "sync": "description": |- The files and file paths to include or exclude in the bundle. "markdown_description": |- The files and file paths to include or exclude in the bundle. See [\_](/dev-tools/bundles/settings.md#sync). + "since_version": |- + v0.228.1 "targets": "description": |- Defines deployment targets for the bundle. "markdown_description": |- Defines deployment targets for the bundle. See [\_](/dev-tools/bundles/settings.md#targets) + "since_version": |- + v0.228.1 "variables": "description": |- A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable. + "since_version": |- + v0.228.1 "workspace": "description": |- Defines the Databricks workspace for the bundle. "markdown_description": |- Defines the Databricks workspace for the bundle. See [\_](/dev-tools/bundles/settings.md#workspace). + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Script: "content": "description": |- PLACEHOLDER + "since_version": |- + v0.259.0 github.com/databricks/cli/bundle/config.Sync: "exclude": "description": |- A list of files or folders to exclude from the bundle. + "since_version": |- + v0.228.1 "include": "description": |- A list of files or folders to include in the bundle. + "since_version": |- + v0.228.1 "paths": "description": |- The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Target: "artifacts": "description": |- The artifacts to include in the target deployment. + "since_version": |- + v0.228.1 "bundle": "description": |- The bundle attributes when deploying to this target. + "since_version": |- + v0.228.1 "cluster_id": "description": |- The ID of the cluster to use for this target. + "since_version": |- + v0.229.0 "compute_id": "description": |- Deprecated. The ID of the compute to use for this target. + "since_version": |- + v0.228.1 "deprecation_message": |- Deprecated: please use cluster_id instead "default": "description": |- Whether this target is the default target. + "since_version": |- + v0.228.1 "git": "description": |- The Git version control settings for the target. + "since_version": |- + v0.228.1 "mode": "description": |- The deployment mode for the target. "markdown_description": |- The deployment mode for the target. Valid values are `development` or `production`. See [\_](/dev-tools/bundles/deployment-modes.md). + "since_version": |- + v0.228.1 "permissions": "description": |- The permissions for deploying and running the bundle in the target. + "since_version": |- + v0.228.1 "presets": "description": |- The deployment presets for the target. 
+ "since_version": |- + v0.228.1 "resources": "description": |- The resource definitions for the target. + "since_version": |- + v0.228.1 "run_as": "description": |- The identity to use to run the bundle. "markdown_description": |- The identity to use to run the bundle, see [\_](/dev-tools/bundles/run-as.md). + "since_version": |- + v0.228.1 "sync": "description": |- The local paths to sync to the target workspace when a bundle is run or deployed. + "since_version": |- + v0.228.1 "variables": "description": |- The custom variable definitions for the target. + "since_version": |- + v0.228.1 "workspace": "description": |- The Databricks workspace for the target. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config.Workspace: "artifact_path": "description": |- The artifact path to use within the workspace for both deployments and workflow runs + "since_version": |- + v0.228.1 "auth_type": "description": |- The authentication type. + "since_version": |- + v0.228.1 "azure_client_id": "description": |- The Azure client ID + "since_version": |- + v0.228.1 "azure_environment": "description": |- The Azure environment + "since_version": |- + v0.228.1 "azure_login_app_id": "description": |- The Azure login app ID + "since_version": |- + v0.228.1 "azure_tenant_id": "description": |- The Azure tenant ID + "since_version": |- + v0.228.1 "azure_use_msi": "description": |- Whether to use MSI for Azure + "since_version": |- + v0.228.1 "azure_workspace_resource_id": "description": |- The Azure workspace resource ID + "since_version": |- + v0.228.1 "client_id": "description": |- The client ID for the workspace + "since_version": |- + v0.228.1 "file_path": "description": |- The file path to use within the workspace for both deployments and workflow runs + "since_version": |- + v0.228.1 "google_service_account": "description": |- The Google service account name + "since_version": |- + v0.228.1 "host": "description": |- The Databricks workspace host URL + "since_version": |- + v0.228.1 "profile": "description": |- The Databricks workspace profile name + "since_version": |- + v0.228.1 "resource_path": "description": |- The workspace resource path + "since_version": |- + v0.230.0 "root_path": "description": |- The Databricks workspace root path + "since_version": |- + v0.228.1 "state_path": "description": |- The workspace state path + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.Alert: "create_time": "description": |- @@ -452,12 +662,18 @@ github.com/databricks/cli/bundle/config/resources.Alert: "custom_description": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "custom_summary": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "display_name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "effective_run_as": "description": |- PLACEHOLDER @@ -473,160 +689,490 @@ github.com/databricks/cli/bundle/config/resources.Alert: "parent_path": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "query_text": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "run_as": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "run_as_user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "update_time": "description": |- PLACEHOLDER "warehouse_id": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 github.com/databricks/cli/bundle/config/resources.AlertPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "level": "description": |- 
PLACEHOLDER + "since_version": |- + v0.279.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 +github.com/databricks/cli/bundle/config/resources.App: + "description": + "since_version": |- + v0.239.0 + "name": + "since_version": |- + v0.239.0 + "resources": + "since_version": |- + v0.239.0 github.com/databricks/cli/bundle/config/resources.AppPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 +github.com/databricks/cli/bundle/config/resources.Cluster: + "apply_policy_default_values": + "since_version": |- + v0.229.0 + "autoscale": + "since_version": |- + v0.229.0 + "autotermination_minutes": + "since_version": |- + v0.229.0 + "aws_attributes": + "since_version": |- + v0.229.0 + "azure_attributes": + "since_version": |- + v0.229.0 + "cluster_log_conf": + "since_version": |- + v0.229.0 + "cluster_name": + "since_version": |- + v0.229.0 + "custom_tags": + "since_version": |- + v0.229.0 + "driver_instance_pool_id": + "since_version": |- + v0.229.0 + "driver_node_type_id": + "since_version": |- + v0.229.0 + "enable_elastic_disk": + "since_version": |- + v0.229.0 + "enable_local_disk_encryption": + "since_version": |- + v0.229.0 + "gcp_attributes": + "since_version": |- + v0.229.0 + "init_scripts": + "since_version": |- + v0.229.0 + "instance_pool_id": + "since_version": |- + v0.229.0 + "is_single_node": + "since_version": |- + v0.237.0 + "node_type_id": + "since_version": |- + v0.229.0 + "num_workers": + "since_version": |- + v0.229.0 + "policy_id": + "since_version": |- + v0.229.0 + "remote_disk_throughput": + "since_version": |- + v0.257.0 + "single_user_name": + "since_version": |- + v0.229.0 + "spark_conf": + "since_version": |- + v0.229.0 + "spark_env_vars": + "since_version": |- + v0.229.0 + "spark_version": + "since_version": |- + v0.229.0 + "ssh_public_keys": + "since_version": |- + v0.229.0 + "total_initial_remote_disk_size": + "since_version": |- + v0.257.0 + "use_ml_runtime": + "since_version": |- + v0.237.0 github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 github.com/databricks/cli/bundle/config/resources.Dashboard: "dataset_catalog": "description": |- Sets the default catalog for all datasets in this dashboard. When set, this overrides the catalog specified in individual dataset definitions. + "since_version": |- + v0.281.0 "dataset_schema": "description": |- Sets the default schema for all datasets in this dashboard. When set, this overrides the schema specified in individual dataset definitions. 
+ "since_version": |- + v0.281.0 github.com/databricks/cli/bundle/config/resources.DashboardPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 +github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: + "database_instance_name": + "since_version": |- + v0.265.0 + "database_name": + "since_version": |- + v0.265.0 + "name": + "since_version": |- + v0.265.0 github.com/databricks/cli/bundle/config/resources.DatabaseInstance: + "capacity": + "since_version": |- + v0.265.0 + "custom_tags": + "since_version": |- + v0.273.0 "effective_capacity": "description": |- PLACEHOLDER + "enable_pg_native_login": + "since_version": |- + v0.267.0 + "enable_readable_secondaries": + "since_version": |- + v0.265.0 + "name": + "since_version": |- + v0.265.0 + "node_count": + "since_version": |- + v0.265.0 + "parent_instance_ref": + "since_version": |- + v0.265.0 + "retention_window_in_days": + "since_version": |- + v0.265.0 + "stopped": + "since_version": |- + v0.265.0 + "usage_policy_id": + "since_version": |- + v0.273.0 github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.265.0 github.com/databricks/cli/bundle/config/resources.Grant: "principal": "description": |- The name of the principal that will be granted privileges + "since_version": |- + v0.228.1 "privileges": "description": |- The privileges to grant to the specified entity + "since_version": |- + v0.228.1 +github.com/databricks/cli/bundle/config/resources.Job: + "budget_policy_id": + "since_version": |- + v0.231.0 + "continuous": + "since_version": |- + v0.228.1 + "description": + "since_version": |- + v0.228.1 + "email_notifications": + "since_version": |- + v0.228.1 + "environments": + "since_version": |- + v0.228.1 + "git_source": + "since_version": |- + v0.228.1 + "job_clusters": + "since_version": |- + v0.228.1 + "max_concurrent_runs": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "notification_settings": + "since_version": |- + v0.228.1 + "parameters": + "since_version": |- + v0.228.1 + "performance_target": + "since_version": |- + v0.241.0 + "queue": + "since_version": |- + v0.228.1 + "schedule": + "since_version": |- + v0.228.1 + "tags": + "since_version": |- + v0.228.1 + "tasks": + "since_version": |- + v0.228.1 + "timeout_seconds": + "since_version": |- + v0.228.1 + "trigger": + "since_version": |- + v0.228.1 + "usage_policy_id": + "since_version": |- + v0.265.0 + "webhook_notifications": + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.JobPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 github.com/databricks/cli/bundle/config/resources.Lifecycle: "prevent_destroy": "description": |- Lifecycle 
setting to prevent the resource from being destroyed. + "since_version": |- + v0.268.0 +github.com/databricks/cli/bundle/config/resources.MlflowExperiment: + "artifact_location": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "tags": + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 +github.com/databricks/cli/bundle/config/resources.MlflowModel: + "description": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "tags": + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 +github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: + "ai_gateway": + "since_version": |- + v0.230.0 + "budget_policy_id": + "since_version": |- + v0.244.0 + "config": + "since_version": |- + v0.228.1 + "email_notifications": + "since_version": |- + v0.264.0 + "name": + "since_version": |- + v0.228.1 + "rate_limits": + "since_version": |- + v0.228.1 + "route_optimized": + "since_version": |- + v0.228.1 + "tags": + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 github.com/databricks/cli/bundle/config/resources.Permission: "-": "description": |- @@ -636,77 +1182,305 @@ github.com/databricks/cli/bundle/config/resources.Permission: "group_name": "description": |- The name of the group that has the permission set in level. + "since_version": |- + v0.228.1 "level": "description": |- The allowed permission for user, group, service principal defined for this permission. + "since_version": |- + v0.228.1 "service_principal_name": "description": |- The name of the service principal that has the permission set in level. + "since_version": |- + v0.228.1 "user_name": "description": |- The name of the user that has the permission set in level. 
+ "since_version": |- + v0.228.1 +github.com/databricks/cli/bundle/config/resources.Pipeline: + "allow_duplicate_names": + "since_version": |- + v0.261.0 + "budget_policy_id": + "since_version": |- + v0.230.0 + "catalog": + "since_version": |- + v0.228.1 + "channel": + "since_version": |- + v0.228.1 + "clusters": + "since_version": |- + v0.228.1 + "configuration": + "since_version": |- + v0.228.1 + "continuous": + "since_version": |- + v0.228.1 + "development": + "since_version": |- + v0.228.1 + "edition": + "since_version": |- + v0.228.1 + "environment": + "since_version": |- + v0.257.0 + "event_log": + "since_version": |- + v0.246.0 + "filters": + "since_version": |- + v0.228.1 + "gateway_definition": + "since_version": |- + v0.228.1 + "id": + "since_version": |- + v0.228.1 + "ingestion_definition": + "since_version": |- + v0.228.1 + "libraries": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "notifications": + "since_version": |- + v0.228.1 + "photon": + "since_version": |- + v0.228.1 + "restart_window": + "since_version": |- + v0.234.0 + "root_path": + "since_version": |- + v0.253.0 + "schema": + "since_version": |- + v0.230.0 + "serverless": + "since_version": |- + v0.228.1 + "storage": + "since_version": |- + v0.228.1 + "tags": + "since_version": |- + v0.256.0 + "target": + "since_version": |- + v0.228.1 + "usage_policy_id": + "since_version": |- + v0.276.0 github.com/databricks/cli/bundle/config/resources.PipelinePermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.247.0 +github.com/databricks/cli/bundle/config/resources.QualityMonitor: + "assets_dir": + "since_version": |- + v0.228.1 + "baseline_table_name": + "since_version": |- + v0.228.1 + "custom_metrics": + "since_version": |- + v0.228.1 + "data_classification_config": + "since_version": |- + v0.228.1 + "latest_monitor_failure_msg": + "since_version": |- + v0.264.0 + "notifications": + "since_version": |- + v0.228.1 + "output_schema_name": + "since_version": |- + v0.228.1 + "schedule": + "since_version": |- + v0.228.1 + "skip_builtin_dashboard": + "since_version": |- + v0.228.1 + "slicing_exprs": + "since_version": |- + v0.228.1 + "snapshot": + "since_version": |- + v0.228.1 + "time_series": + "since_version": |- + v0.228.1 + "warehouse_id": + "since_version": |- + v0.228.1 +github.com/databricks/cli/bundle/config/resources.RegisteredModel: + "catalog_name": + "since_version": |- + v0.228.1 + "comment": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "schema_name": + "since_version": |- + v0.228.1 + "storage_location": + "since_version": |- + v0.228.1 +github.com/databricks/cli/bundle/config/resources.Schema: + "catalog_name": + "since_version": |- + v0.228.1 + "comment": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "storage_root": + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/resources.SchemaGrant: "principal": "description": |- PLACEHOLDER + "since_version": |- + v0.267.0 "privileges": "description": |- PLACEHOLDER + "since_version": |- + v0.267.0 github.com/databricks/cli/bundle/config/resources.SecretScope: "backend_type": "description": |- The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS` + "since_version": |- + v0.252.0 "keyvault_metadata": "description": |- The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT` + "since_version": |- + v0.252.0 "lifecycle": "description": |- Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. + "since_version": |- + v0.268.0 "name": "description": |- Scope name requested by the user. Scope names are unique. + "since_version": |- + v0.252.0 "permissions": "description": |- The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. + "since_version": |- + v0.252.0 github.com/databricks/cli/bundle/config/resources.SecretScopePermission: "group_name": "description": |- The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. + "since_version": |- + v0.252.0 "level": "description": |- The allowed permission for user, group, service principal defined for this permission. + "since_version": |- + v0.252.0 "service_principal_name": "description": |- The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. + "since_version": |- + v0.252.0 "user_name": "description": |- The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. + "since_version": |- + v0.252.0 +github.com/databricks/cli/bundle/config/resources.SqlWarehouse: + "auto_stop_mins": + "since_version": |- + v0.260.0 + "channel": + "since_version": |- + v0.260.0 + "cluster_size": + "since_version": |- + v0.260.0 + "creator_name": + "since_version": |- + v0.260.0 + "enable_serverless_compute": + "since_version": |- + v0.260.0 + "instance_profile_arn": + "since_version": |- + v0.260.0 + "max_num_clusters": + "since_version": |- + v0.260.0 + "min_num_clusters": + "since_version": |- + v0.260.0 + "name": + "since_version": |- + v0.260.0 + "tags": + "since_version": |- + v0.260.0 github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission: "group_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "level": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "service_principal_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "data_synchronization_status": "description": |- @@ -714,6 +1488,8 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "database_instance_name": "description": |- PLACEHOLDER + "since_version": |- + v0.266.0 "effective_database_instance_name": "description": |- PLACEHOLDER @@ -723,78 +1499,138 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "lifecycle": "description": |- Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. 
+ "since_version": |- + v0.268.0 "logical_database_name": "description": |- PLACEHOLDER + "since_version": |- + v0.266.0 "name": "description": |- PLACEHOLDER + "since_version": |- + v0.266.0 "spec": "description": |- PLACEHOLDER + "since_version": |- + v0.266.0 "unity_catalog_provisioning_state": "description": |- PLACEHOLDER +github.com/databricks/cli/bundle/config/resources.Volume: + "catalog_name": + "since_version": |- + v0.236.0 + "comment": + "since_version": |- + v0.236.0 + "name": + "since_version": |- + v0.236.0 + "schema_name": + "since_version": |- + v0.236.0 + "storage_location": + "since_version": |- + v0.236.0 github.com/databricks/cli/bundle/config/resources.VolumeGrant: "principal": "description": |- PLACEHOLDER + "since_version": |- + v0.264.1 "privileges": "description": |- PLACEHOLDER + "since_version": |- + v0.264.1 github.com/databricks/cli/bundle/config/variable.Lookup: "alert": "description": |- The name of the alert for which to retrieve an ID. + "since_version": |- + v0.228.1 "cluster": "description": |- The name of the cluster for which to retrieve an ID. + "since_version": |- + v0.228.1 "cluster_policy": "description": |- The name of the cluster_policy for which to retrieve an ID. + "since_version": |- + v0.228.1 "dashboard": "description": |- The name of the dashboard for which to retrieve an ID. + "since_version": |- + v0.228.1 "instance_pool": "description": |- The name of the instance_pool for which to retrieve an ID. + "since_version": |- + v0.228.1 "job": "description": |- The name of the job for which to retrieve an ID. + "since_version": |- + v0.228.1 "metastore": "description": |- The name of the metastore for which to retrieve an ID. + "since_version": |- + v0.228.1 "notification_destination": "description": |- The name of the notification_destination for which to retrieve an ID. + "since_version": |- + v0.236.0 "pipeline": "description": |- The name of the pipeline for which to retrieve an ID. + "since_version": |- + v0.228.1 "query": "description": |- The name of the query for which to retrieve an ID. + "since_version": |- + v0.228.1 "service_principal": "description": |- The name of the service_principal for which to retrieve an ID. + "since_version": |- + v0.228.1 "warehouse": "description": |- The name of the warehouse for which to retrieve an ID. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/variable.TargetVariable: "default": "description": |- The default value for the variable. + "since_version": |- + v0.228.1 "description": "description": |- The description of the variable. + "since_version": |- + v0.228.1 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. + "since_version": |- + v0.228.1 "markdown_description": "description": |- The type of the variable. "type": "description": |- The type of the variable. + "since_version": |- + v0.228.1 github.com/databricks/cli/bundle/config/variable.Variable: "_": "description": |- @@ -804,17 +1640,25 @@ github.com/databricks/cli/bundle/config/variable.Variable: "default": "description": |- The default value for the variable. 
+ "since_version": |- + v0.228.1 "description": "description": |- The description of the variable + "since_version": |- + v0.228.1 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. "markdown_description": |- The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID. + "since_version": |- + v0.228.1 "type": "description": |- The type of the variable. + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs: "service_principal_name": "description": |- diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 427c184570..c452ba610a 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -701,12 +701,18 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeployment: "deployment_id": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "mode": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "source_code_path": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "status": "description": |- PLACEHOLDER @@ -717,6 +723,8 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts: "source_code_path": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus: "message": "description": |- @@ -728,85 +736,141 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource: "database": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 + "description": + "since_version": |- + v0.239.0 "genie_space": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "job": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 + "name": + "since_version": |- + v0.239.0 "secret": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "serving_endpoint": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "sql_warehouse": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "uc_securable": "description": |- PLACEHOLDER + "since_version": |- + v0.253.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase: "database_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "instance_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace: "name": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "space_id": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob: "id": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret: "key": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "scope": "description": |- PLACEHOLDER + "since_version": |- + 
v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint: "name": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse: "id": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable: "permission": "description": |- PLACEHOLDER + "since_version": |- + v0.253.0 "securable_full_name": "description": |- PLACEHOLDER + "since_version": |- + v0.253.0 "securable_type": "description": |- PLACEHOLDER + "since_version": |- + v0.253.0 github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus: "message": "description": |- @@ -819,249 +883,2174 @@ github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus: "description": |- PLACEHOLDER "state": {} +github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule: + "pause_status": + "since_version": |- + v0.228.1 + "quartz_cron_expression": + "since_version": |- + v0.228.1 + "timezone_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig: + "enabled": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination: + "email_addresses": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. + "since_version": |- + v0.228.1 + "label_col": + "since_version": |- + v0.228.1 + "model_id_col": + "since_version": |- + v0.228.1 + "prediction_col": + "since_version": |- + v0.228.1 + "prediction_proba_col": + "since_version": |- + v0.228.1 + "problem_type": + "since_version": |- + v0.228.1 + "timestamp_col": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric: + "definition": + "since_version": |- + v0.228.1 + "input_columns": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "output_data_type": + "since_version": |- + v0.228.1 + "type": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications: + "on_failure": + "since_version": |- + v0.228.1 + "on_new_classification_tag_detected": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. 
+ "since_version": |- + v0.228.1 + "timestamp_col": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias: + "alias_name": + "since_version": |- + v0.273.0 "catalog_name": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "id": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "model_name": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 "schema_name": "description": |- PLACEHOLDER + "since_version": |- + v0.273.0 + "version_num": + "since_version": |- + v0.273.0 +github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info: + "destination": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.AutoScale: + "max_workers": + "since_version": |- + v0.228.1 + "min_workers": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes: "availability": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "ebs_volume_count": + "since_version": |- + v0.228.1 + "ebs_volume_iops": + "since_version": |- + v0.228.1 + "ebs_volume_size": + "since_version": |- + v0.228.1 + "ebs_volume_throughput": + "since_version": |- + v0.228.1 "ebs_volume_type": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "first_on_demand": + "since_version": |- + v0.228.1 + "instance_profile_arn": + "since_version": |- + v0.228.1 + "spot_bid_price_percent": + "since_version": |- + v0.228.1 + "zone_id": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes: "availability": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "first_on_demand": + "since_version": |- + v0.228.1 + "log_analytics_info": + "since_version": |- + v0.228.1 + "spot_bid_max_price": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes: + "jobs": + "since_version": |- + v0.228.1 + "notebooks": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf: + "dbfs": + "since_version": |- + v0.228.1 + "s3": + "since_version": |- + v0.228.1 + "volumes": + "since_version": |- + v0.242.0 github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: + "apply_policy_default_values": + "since_version": |- + v0.228.1 + "autoscale": + "since_version": |- + v0.228.1 + "autotermination_minutes": + "since_version": |- + v0.228.1 + "aws_attributes": + "since_version": |- + v0.228.1 + "azure_attributes": + "since_version": |- + v0.228.1 + "cluster_log_conf": + "since_version": |- + v0.228.1 + "cluster_name": + "since_version": |- + v0.228.1 + "custom_tags": + "since_version": |- + v0.228.1 "data_security_mode": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 "docker_image": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "driver_instance_pool_id": + "since_version": |- + v0.228.1 + "driver_node_type_id": + "since_version": |- + v0.228.1 + "enable_elastic_disk": + "since_version": |- + v0.228.1 + "enable_local_disk_encryption": + "since_version": |- + v0.228.1 + "gcp_attributes": + "since_version": |- + v0.228.1 + "init_scripts": + "since_version": |- + v0.228.1 + "instance_pool_id": + "since_version": |- + v0.228.1 + "is_single_node": + "since_version": |- + v0.237.0 "kind": "description": |- PLACEHOLDER + "since_version": |- + v0.237.0 + "node_type_id": + "since_version": |- + v0.228.1 + "num_workers": + "since_version": |- + v0.228.1 + "policy_id": + "since_version": |- + 
v0.228.1 + "remote_disk_throughput": + "since_version": |- + v0.257.0 "runtime_engine": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "single_user_name": + "since_version": |- + v0.228.1 + "spark_conf": + "since_version": |- + v0.228.1 + "spark_env_vars": + "since_version": |- + v0.228.1 + "spark_version": + "since_version": |- + v0.228.1 + "ssh_public_keys": + "since_version": |- + v0.228.1 + "total_initial_remote_disk_size": + "since_version": |- + v0.257.0 + "use_ml_runtime": + "since_version": |- + v0.237.0 "workload_type": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo: + "destination": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth: + "password": + "since_version": |- + v0.228.1 + "username": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.DockerImage: "basic_auth": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "url": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.Environment: + "client": + "since_version": |- + v0.228.1 "dependencies": "description": |- List of pip dependencies, as supported by the version of pip in this environment. + "since_version": |- + v0.228.1 + "environment_version": + "since_version": |- + v0.252.0 "java_dependencies": "description": |- PLACEHOLDER + "since_version": |- + v0.271.0 github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes: "availability": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "boot_disk_size": + "since_version": |- + v0.228.1 + "first_on_demand": + "since_version": |- + v0.265.0 + "google_service_account": + "since_version": |- + v0.228.1 + "local_ssd_count": + "since_version": |- + v0.228.1 + "use_preemptible_executors": + "since_version": |- + v0.228.1 + "zone_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo: + "destination": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo: "abfss": "description": |- Contains the Azure Data Lake Storage destination path + "since_version": |- + v0.228.1 + "dbfs": + "since_version": |- + v0.228.1 + "file": + "since_version": |- + v0.228.1 + "gcs": + "since_version": |- + v0.228.1 + "s3": + "since_version": |- + v0.228.1 + "volumes": + "since_version": |- + v0.228.1 + "workspace": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.Kind: "_": "enum": - |- CLASSIC_PREVIEW +github.com/databricks/databricks-sdk-go/service/compute.Library: + "cran": + "since_version": |- + v0.228.1 + "egg": + "since_version": |- + v0.228.1 + "jar": + "since_version": |- + v0.228.1 + "maven": + "since_version": |- + v0.228.1 + "pypi": + "since_version": |- + v0.228.1 + "requirements": + "since_version": |- + v0.228.1 + "whl": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo: + "destination": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo: "log_analytics_primary_key": "description": |- The primary key for the Azure Log Analytics agent configuration + "since_version": |- + v0.228.1 "log_analytics_workspace_id": "description": |- The workspace ID for the Azure Log Analytics agent configuration + "since_version": |- + v0.228.1 
+github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary: + "coordinates": + "since_version": |- + v0.228.1 + "exclusions": + "since_version": |- + v0.228.1 + "repo": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary: + "package": + "since_version": |- + v0.228.1 + "repo": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary: + "package": + "since_version": |- + v0.228.1 + "repo": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo: + "canned_acl": + "since_version": |- + v0.228.1 + "destination": + "since_version": |- + v0.228.1 + "enable_encryption": + "since_version": |- + v0.228.1 + "encryption_type": + "since_version": |- + v0.228.1 + "endpoint": + "since_version": |- + v0.228.1 + "kms_key": + "since_version": |- + v0.228.1 + "region": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo: + "destination": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.WorkloadType: + "clients": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo: + "destination": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/database.CustomTag: + "key": + "since_version": |- + v0.273.0 + "value": + "since_version": |- + v0.273.0 +github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef: + "branch_time": + "since_version": |- + v0.265.0 + "lsn": + "since_version": |- + v0.265.0 + "name": + "since_version": |- + v0.265.0 +github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec: + "budget_policy_id": + "since_version": |- + v0.279.0 + "storage_catalog": + "since_version": |- + v0.266.0 + "storage_schema": + "since_version": |- + v0.266.0 github.com/databricks/databricks-sdk-go/service/database.SyncedTablePosition: "delta_table_sync_info": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec: + "create_database_objects_if_missing": + "since_version": |- + v0.266.0 + "existing_pipeline_id": + "since_version": |- + v0.266.0 + "new_pipeline_spec": + "since_version": |- + v0.266.0 + "primary_key_columns": + "since_version": |- + v0.266.0 + "scheduling_policy": + "since_version": |- + v0.266.0 + "source_table_full_name": + "since_version": |- + v0.266.0 + "timeseries_key": + "since_version": |- + v0.266.0 +github.com/databricks/databricks-sdk-go/service/database.SyncedTableStatus: + "continuous_update_status": + "since_version": |- + v0.266.0 + "failed_status": + "since_version": |- + v0.266.0 + "provisioning_status": + "since_version": |- + v0.266.0 + "triggered_update_status": + "since_version": |- + v0.266.0 +github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask: + "clean_room_name": + "since_version": |- + v0.237.0 + "etag": + "since_version": |- + v0.237.0 + "notebook_base_parameters": + "since_version": |- + v0.237.0 + "notebook_name": + "since_version": |- + v0.237.0 +github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig: + "gpu_node_pool_id": + "since_version": |- + v0.243.0 + "gpu_type": + "since_version": |- + v0.243.0 + "num_gpus": + "since_version": |- + v0.243.0 +github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask: + "left": + "since_version": |- + v0.228.1 + "op": + "since_version": |- + v0.228.1 + "right": + 
"since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.Continuous: + "pause_status": + "since_version": |- + v0.228.1 + "task_retry_mode": + "since_version": |- + v0.267.0 +github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule: + "pause_status": + "since_version": |- + v0.228.1 + "quartz_cron_expression": + "since_version": |- + v0.228.1 + "timezone_id": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask: "dashboard_id": "description": |- PLACEHOLDER + "since_version": |- + v0.248.0 "subscription": "description": |- PLACEHOLDER + "since_version": |- + v0.248.0 + "warehouse_id": + "since_version": |- + v0.248.0 +github.com/databricks/databricks-sdk-go/service/jobs.DbtCloudTask: + "connection_resource_name": + "since_version": |- + v0.256.0 + "dbt_cloud_job_id": + "since_version": |- + v0.256.0 +github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask: + "connection_resource_name": + "since_version": |- + v0.257.0 + "dbt_platform_job_id": + "since_version": |- + v0.257.0 +github.com/databricks/databricks-sdk-go/service/jobs.DbtTask: + "catalog": + "since_version": |- + v0.228.1 + "commands": + "since_version": |- + v0.228.1 + "profiles_directory": + "since_version": |- + v0.228.1 + "project_directory": + "since_version": |- + v0.228.1 + "schema": + "since_version": |- + v0.228.1 + "source": + "since_version": |- + v0.228.1 + "warehouse_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration: + "min_time_between_triggers_seconds": + "since_version": |- + v0.228.1 + "url": + "since_version": |- + v0.228.1 + "wait_after_last_change_seconds": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask: + "concurrency": + "since_version": |- + v0.228.1 + "inputs": + "since_version": |- + v0.228.1 + "task": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask: + "command": + "since_version": |- + v0.243.0 "compute": "description": |- PLACEHOLDER + "since_version": |- + v0.243.0 + "dl_runtime_image": + "since_version": |- + v0.243.0 + "mlflow_experiment_name": + "since_version": |- + v0.243.0 + "source": + "since_version": |- + v0.243.0 + "training_script_path": + "since_version": |- + v0.243.0 + "yaml_parameters": + "since_version": |- + v0.243.0 + "yaml_parameters_file_path": + "since_version": |- + v0.243.0 +github.com/databricks/databricks-sdk-go/service/jobs.GitSnapshot: + "used_commit": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.GitSource: + "git_branch": + "since_version": |- + v0.228.1 + "git_commit": + "since_version": |- + v0.228.1 + "git_provider": + "since_version": |- + v0.228.1 "git_snapshot": "description": |- PLACEHOLDER + "git_tag": + "since_version": |- + v0.228.1 + "git_url": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobCluster: + "job_cluster_key": + "since_version": |- + v0.228.1 + "new_cluster": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobDeployment: + "kind": + "since_version": |- + v0.228.1 + "metadata_file_path": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications: + "no_alert_for_skipped_runs": + "since_version": |- + v0.228.1 + "on_duration_warning_threshold_exceeded": + "since_version": |- + v0.228.1 + "on_failure": + 
"since_version": |- + v0.228.1 + "on_start": + "since_version": |- + v0.228.1 + "on_streaming_backlog_exceeded": + "since_version": |- + v0.228.1 + "on_success": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment: + "environment_key": + "since_version": |- + v0.228.1 "spec": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings: + "no_alert_for_canceled_runs": + "since_version": |- + v0.228.1 + "no_alert_for_skipped_runs": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition: + "default": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.JobSource: + "dirty_state": + "since_version": |- + v0.228.1 + "import_from_git_branch": + "since_version": |- + v0.228.1 + "job_config_path": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule: "metric": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 "op": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "value": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules: "rules": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration: + "aliases": + "since_version": |- + v0.279.0 + "condition": + "since_version": |- + v0.279.0 + "min_time_between_triggers_seconds": + "since_version": |- + v0.279.0 + "securable_name": + "since_version": |- + v0.279.0 + "wait_after_last_change_seconds": + "since_version": |- + v0.279.0 +github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask: + "base_parameters": + "since_version": |- + v0.228.1 + "notebook_path": + "since_version": |- + v0.228.1 + "source": + "since_version": |- + v0.228.1 + "warehouse_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration: + "interval": + "since_version": |- + v0.228.1 + "unit": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams: + "full_refresh": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask: + "full_refresh": + "since_version": |- + v0.228.1 + "pipeline_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel: + "authentication_method": + "since_version": |- + v0.248.0 + "model_name": + "since_version": |- + v0.248.0 + "overwrite_existing": + "since_version": |- + v0.248.0 + "storage_mode": + "since_version": |- + v0.248.0 + "workspace_name": + "since_version": |- + v0.248.0 +github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable: + "catalog": + "since_version": |- + v0.248.0 + "name": + "since_version": |- + v0.248.0 + "schema": + "since_version": |- + v0.248.0 + "storage_mode": + "since_version": |- + v0.248.0 +github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask: + "connection_resource_name": + "since_version": |- + v0.248.0 + "power_bi_model": + "since_version": |- + v0.248.0 + "refresh_after_update": + "since_version": |- + v0.248.0 + "tables": + "since_version": |- + v0.248.0 + "warehouse_id": + "since_version": |- + v0.248.0 +github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask: + "entry_point": + "since_version": |- + v0.228.1 + 
"named_parameters": + "since_version": |- + v0.228.1 + "package_name": + "since_version": |- + v0.228.1 + "parameters": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings: + "enabled": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask: + "dbt_commands": + "since_version": |- + v0.228.1 + "jar_params": + "since_version": |- + v0.228.1 + "job_id": + "since_version": |- + v0.228.1 + "job_parameters": + "since_version": |- + v0.228.1 + "notebook_params": + "since_version": |- + v0.228.1 + "pipeline_params": + "since_version": |- + v0.228.1 "python_named_params": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "python_params": + "since_version": |- + v0.228.1 + "spark_submit_params": + "since_version": |- + v0.228.1 + "sql_params": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask: + "jar_uri": + "since_version": |- + v0.228.1 + "main_class_name": + "since_version": |- + v0.228.1 + "parameters": + "since_version": |- + v0.228.1 + "run_as_repl": + "since_version": |- + v0.240.0 +github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask: + "parameters": + "since_version": |- + v0.228.1 + "python_file": + "since_version": |- + v0.228.1 + "source": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask: + "parameters": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTask: + "alert": + "since_version": |- + v0.228.1 + "dashboard": + "since_version": |- + v0.228.1 + "file": + "since_version": |- + v0.228.1 + "parameters": + "since_version": |- + v0.228.1 + "query": + "since_version": |- + v0.228.1 + "warehouse_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert: + "alert_id": + "since_version": |- + v0.228.1 + "pause_subscriptions": + "since_version": |- + v0.228.1 + "subscriptions": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard: + "custom_subject": + "since_version": |- + v0.228.1 + "dashboard_id": + "since_version": |- + v0.228.1 + "pause_subscriptions": + "since_version": |- + v0.228.1 + "subscriptions": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile: + "path": + "since_version": |- + v0.228.1 + "source": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery: + "query_id": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription: + "destination_id": + "since_version": |- + v0.228.1 + "user_name": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.Subscription: + "custom_subject": + "since_version": |- + v0.248.0 + "paused": + "since_version": |- + v0.248.0 "subscribers": "description": |- PLACEHOLDER + "since_version": |- + v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber: "destination_id": "description": |- PLACEHOLDER + "since_version": |- + v0.248.0 "user_name": "description": |- PLACEHOLDER + "since_version": |- + v0.248.0 +github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration: + "condition": + "since_version": |- + v0.228.1 + "min_time_between_triggers_seconds": + "since_version": |- + v0.228.1 + "table_names": + "since_version": |- + v0.228.1 + "wait_after_last_change_seconds": 
+ "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.Task: + "clean_rooms_notebook_task": + "since_version": |- + v0.237.0 + "condition_task": + "since_version": |- + v0.228.1 + "dashboard_task": + "since_version": |- + v0.248.0 + "dbt_cloud_task": + "since_version": |- + v0.256.0 "dbt_platform_task": "description": |- PLACEHOLDER + "since_version": |- + v0.257.0 + "dbt_task": + "since_version": |- + v0.228.1 + "depends_on": + "since_version": |- + v0.228.1 + "description": + "since_version": |- + v0.228.1 + "disable_auto_optimization": + "since_version": |- + v0.228.1 + "disabled": + "since_version": |- + v0.271.0 + "email_notifications": + "since_version": |- + v0.228.1 + "environment_key": + "since_version": |- + v0.228.1 + "existing_cluster_id": + "since_version": |- + v0.228.1 + "for_each_task": + "since_version": |- + v0.228.1 "gen_ai_compute_task": "description": |- PLACEHOLDER + "since_version": |- + v0.243.0 "health": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "job_cluster_key": + "since_version": |- + v0.228.1 + "libraries": + "since_version": |- + v0.228.1 + "max_retries": + "since_version": |- + v0.228.1 + "min_retry_interval_millis": + "since_version": |- + v0.228.1 + "new_cluster": + "since_version": |- + v0.228.1 + "notebook_task": + "since_version": |- + v0.228.1 + "notification_settings": + "since_version": |- + v0.228.1 + "pipeline_task": + "since_version": |- + v0.228.1 + "power_bi_task": + "since_version": |- + v0.248.0 + "python_wheel_task": + "since_version": |- + v0.228.1 + "retry_on_timeout": + "since_version": |- + v0.228.1 + "run_if": + "since_version": |- + v0.228.1 + "run_job_task": + "since_version": |- + v0.228.1 + "spark_jar_task": + "since_version": |- + v0.228.1 + "spark_python_task": + "since_version": |- + v0.228.1 + "spark_submit_task": + "since_version": |- + v0.228.1 + "sql_task": + "since_version": |- + v0.228.1 + "task_key": + "since_version": |- + v0.228.1 + "timeout_seconds": + "since_version": |- + v0.228.1 + "webhook_notifications": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency: + "outcome": + "since_version": |- + v0.228.1 + "task_key": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications: + "no_alert_for_skipped_runs": + "since_version": |- + v0.228.1 + "on_duration_warning_threshold_exceeded": + "since_version": |- + v0.228.1 + "on_failure": + "since_version": |- + v0.228.1 + "on_start": + "since_version": |- + v0.228.1 + "on_streaming_backlog_exceeded": + "since_version": |- + v0.228.1 + "on_success": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings: + "alert_on_last_attempt": + "since_version": |- + v0.228.1 + "no_alert_for_canceled_runs": + "since_version": |- + v0.228.1 + "no_alert_for_skipped_runs": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings: + "file_arrival": + "since_version": |- + v0.228.1 "model": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 + "pause_status": + "since_version": |- + v0.228.1 + "periodic": + "since_version": |- + v0.228.1 "table_update": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/jobs.Webhook: "id": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications: + 
"on_duration_warning_threshold_exceeded": + "since_version": |- + v0.228.1 + "on_failure": + "since_version": |- + v0.228.1 + "on_start": + "since_version": |- + v0.228.1 + "on_streaming_backlog_exceeded": + "since_version": |- + v0.228.1 + "on_success": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag: + "key": + "since_version": |- + v0.228.1 + "value": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/ml.ModelTag: + "key": + "since_version": |- + v0.228.1 + "value": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters: + "source_catalog": + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: "quartz_cron_schedule": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 "timezone_id": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec: + "catalog": + "since_version": |- + v0.246.0 + "name": + "since_version": |- + v0.246.0 + "schema": + "since_version": |- + v0.246.0 +github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary: + "path": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.Filters: + "exclude": + "since_version": |- + v0.228.1 + "include": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig: + "report": + "since_version": |- + v0.231.0 + "schema": + "since_version": |- + v0.228.1 + "table": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition: + "connection_id": + "since_version": |- + v0.228.1 + "connection_name": + "since_version": |- + v0.234.0 + "connection_parameters": + "since_version": |- + v0.279.0 + "gateway_storage_catalog": + "since_version": |- + v0.228.1 + "gateway_storage_name": + "since_version": |- + v0.228.1 + "gateway_storage_schema": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition: + "connection_name": + "since_version": |- + v0.228.1 + "ingest_from_uc_foreign_catalog": + "since_version": |- + v0.279.0 + "ingestion_gateway_id": + "since_version": |- + v0.228.1 "netsuite_jar_path": "description": |- PLACEHOLDER + "since_version": |- + v0.271.0 + "objects": + "since_version": |- + v0.228.1 + "source_configurations": + "since_version": |- + v0.267.0 + "table_configuration": + "since_version": |- + v0.228.1 +? 
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig +: "cursor_columns": + "since_version": |- + v0.264.0 + "deletion_condition": + "since_version": |- + v0.264.0 + "hard_deletion_sync_min_interval_in_seconds": + "since_version": |- + v0.264.0 +github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters: + "incremental": + "since_version": |- + v0.271.0 + "parameters": + "since_version": |- + v0.271.0 + "report_parameters": + "since_version": |- + v0.271.0 +github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue: + "key": + "since_version": |- + v0.271.0 + "value": + "since_version": |- + v0.271.0 +github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary: + "path": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.Notifications: + "alerts": + "since_version": |- + v0.228.1 + "email_recipients": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern: + "include": + "since_version": |- + v0.252.0 +github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster: + "apply_policy_default_values": + "since_version": |- + v0.228.1 + "autoscale": + "since_version": |- + v0.228.1 + "aws_attributes": + "since_version": |- + v0.228.1 + "azure_attributes": + "since_version": |- + v0.228.1 + "cluster_log_conf": + "since_version": |- + v0.228.1 + "custom_tags": + "since_version": |- + v0.228.1 + "driver_instance_pool_id": + "since_version": |- + v0.228.1 + "driver_node_type_id": + "since_version": |- + v0.228.1 + "enable_local_disk_encryption": + "since_version": |- + v0.228.1 + "gcp_attributes": + "since_version": |- + v0.228.1 + "init_scripts": + "since_version": |- + v0.228.1 + "instance_pool_id": + "since_version": |- + v0.228.1 + "label": + "since_version": |- + v0.228.1 + "node_type_id": + "since_version": |- + v0.228.1 + "num_workers": + "since_version": |- + v0.228.1 + "policy_id": + "since_version": |- + v0.228.1 + "spark_conf": + "since_version": |- + v0.228.1 + "spark_env_vars": + "since_version": |- + v0.228.1 + "ssh_public_keys": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale: + "max_workers": + "since_version": |- + v0.228.1 + "min_workers": + "since_version": |- + v0.228.1 + "mode": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.PipelineDeployment: + "kind": + "since_version": |- + v0.228.1 + "metadata_file_path": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary: + "file": + "since_version": |- + v0.228.1 + "glob": + "since_version": |- + v0.252.0 + "jar": + "since_version": |- + v0.228.1 + "maven": + "since_version": |- + v0.228.1 + "notebook": + "since_version": |- + v0.228.1 "whl": + "since_version": |- + v0.228.1 "deprecation_message": |- This field is deprecated github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "cron": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 "manual": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment: + "dependencies": + "since_version": |- + v0.257.0 +github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig: + "slot_config": + 
"since_version": |- + v0.267.0 +github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig: + "publication_name": + "since_version": |- + v0.267.0 + "slot_name": + "since_version": |- + v0.267.0 +github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec: + "destination_catalog": + "since_version": |- + v0.231.0 + "destination_schema": + "since_version": |- + v0.231.0 + "destination_table": + "since_version": |- + v0.231.0 + "source_url": + "since_version": |- + v0.231.0 + "table_configuration": + "since_version": |- + v0.231.0 +github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow: + "days_of_week": + "since_version": |- + v0.234.0 + "start_hour": + "since_version": |- + v0.234.0 + "time_zone_id": + "since_version": |- + v0.234.0 +github.com/databricks/databricks-sdk-go/service/pipelines.RunAs: + "service_principal_name": + "since_version": |- + v0.241.0 + "user_name": + "since_version": |- + v0.241.0 +github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec: + "destination_catalog": + "since_version": |- + v0.228.1 + "destination_schema": + "since_version": |- + v0.228.1 + "source_catalog": + "since_version": |- + v0.228.1 + "source_schema": + "since_version": |- + v0.228.1 + "table_configuration": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig: + "postgres": + "since_version": |- + v0.267.0 + "source_catalog": + "since_version": |- + v0.267.0 +github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig: + "catalog": + "since_version": |- + v0.267.0 +github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec: + "destination_catalog": + "since_version": |- + v0.228.1 + "destination_schema": + "since_version": |- + v0.228.1 + "destination_table": + "since_version": |- + v0.228.1 + "source_catalog": + "since_version": |- + v0.228.1 + "source_schema": + "since_version": |- + v0.228.1 + "source_table": + "since_version": |- + v0.228.1 + "table_configuration": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: + "exclude_columns": + "since_version": |- + v0.251.0 + "include_columns": + "since_version": |- + v0.251.0 + "primary_keys": + "since_version": |- + v0.228.1 + "query_based_connector_config": + "since_version": |- + v0.264.0 + "salesforce_include_formula_fields": + "since_version": |- + v0.228.1 + "scd_type": + "since_version": |- + v0.228.1 + "sequence_by": + "since_version": |- + v0.231.0 "workday_report_parameters": "description": |- PLACEHOLDER + "since_version": |- + v0.271.0 +github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: + "ai21labs_api_key": + "since_version": |- + v0.228.1 + "ai21labs_api_key_plaintext": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig: + "fallback_config": + "since_version": |- + v0.246.0 + "guardrails": + "since_version": |- + v0.230.0 + "inference_table_config": + "since_version": |- + v0.230.0 + "rate_limits": + "since_version": |- + v0.230.0 + "usage_tracking_config": + "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters: + "invalid_keywords": + "since_version": |- + v0.230.0 + "pii": + "since_version": |- + v0.230.0 + "safety": + "since_version": |- + v0.230.0 + "valid_topics": + "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior: + "behavior": 
+ "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails: + "input": + "since_version": |- + v0.230.0 + "output": + "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig: + "catalog_name": + "since_version": |- + v0.230.0 + "enabled": + "since_version": |- + v0.230.0 + "schema_name": + "since_version": |- + v0.230.0 + "table_name_prefix": + "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit: + "calls": + "since_version": |- + v0.230.0 + "key": + "since_version": |- + v0.230.0 + "principal": + "since_version": |- + v0.260.0 + "renewal_period": + "since_version": |- + v0.230.0 + "tokens": + "since_version": |- + v0.265.0 +github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig: + "enabled": + "since_version": |- + v0.230.0 +github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig: + "aws_access_key_id": + "since_version": |- + v0.228.1 + "aws_access_key_id_plaintext": + "since_version": |- + v0.228.1 + "aws_region": + "since_version": |- + v0.228.1 + "aws_secret_access_key": + "since_version": |- + v0.228.1 + "aws_secret_access_key_plaintext": + "since_version": |- + v0.228.1 + "bedrock_provider": + "since_version": |- + v0.228.1 + "instance_profile_arn": + "since_version": |- + v0.243.0 +github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig: + "anthropic_api_key": + "since_version": |- + v0.228.1 + "anthropic_api_key_plaintext": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth: + "key": + "since_version": |- + v0.246.0 + "value": + "since_version": |- + v0.246.0 + "value_plaintext": + "since_version": |- + v0.246.0 +github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput: + "catalog_name": + "since_version": |- + v0.228.1 + "enabled": + "since_version": |- + v0.228.1 + "schema_name": + "since_version": |- + v0.228.1 + "table_name_prefix": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth: + "token": + "since_version": |- + v0.246.0 + "token_plaintext": + "since_version": |- + v0.246.0 +github.com/databricks/databricks-sdk-go/service/serving.CohereConfig: + "cohere_api_base": + "since_version": |- + v0.228.1 + "cohere_api_key": + "since_version": |- + v0.228.1 + "cohere_api_key_plaintext": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig: + "api_key_auth": + "since_version": |- + v0.246.0 + "bearer_token_auth": + "since_version": |- + v0.246.0 + "custom_provider_url": + "since_version": |- + v0.246.0 +github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig: + "databricks_api_token": + "since_version": |- + v0.228.1 + "databricks_api_token_plaintext": + "since_version": |- + v0.228.1 + "databricks_workspace_url": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications: + "on_update_failure": + "since_version": |- + v0.264.0 + "on_update_success": + "since_version": |- + v0.264.0 +github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput: + "auto_capture_config": + "since_version": |- + v0.228.1 + "served_entities": + "since_version": |- + v0.228.1 + "served_models": + "since_version": |- + v0.228.1 + "traffic_config": + "since_version": |- + v0.228.1 
+github.com/databricks/databricks-sdk-go/service/serving.EndpointTag: + "key": + "since_version": |- + v0.228.1 + "value": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.ExternalModel: + "ai21labs_config": + "since_version": |- + v0.228.1 + "amazon_bedrock_config": + "since_version": |- + v0.228.1 + "anthropic_config": + "since_version": |- + v0.228.1 + "cohere_config": + "since_version": |- + v0.228.1 + "custom_provider_config": + "since_version": |- + v0.246.0 + "databricks_model_serving_config": + "since_version": |- + v0.228.1 + "google_cloud_vertex_ai_config": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "openai_config": + "since_version": |- + v0.228.1 + "palm_config": + "since_version": |- + v0.228.1 + "provider": + "since_version": |- + v0.228.1 + "task": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig: + "enabled": + "since_version": |- + v0.246.0 +github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig: + "private_key": + "since_version": |- + v0.228.1 + "private_key_plaintext": + "since_version": |- + v0.228.1 + "project_id": + "since_version": |- + v0.228.1 + "region": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig: + "microsoft_entra_client_id": + "since_version": |- + v0.228.1 + "microsoft_entra_client_secret": + "since_version": |- + v0.228.1 + "microsoft_entra_client_secret_plaintext": + "since_version": |- + v0.228.1 + "microsoft_entra_tenant_id": + "since_version": |- + v0.228.1 + "openai_api_base": + "since_version": |- + v0.228.1 + "openai_api_key": + "since_version": |- + v0.228.1 + "openai_api_key_plaintext": + "since_version": |- + v0.228.1 + "openai_api_type": + "since_version": |- + v0.228.1 + "openai_api_version": + "since_version": |- + v0.228.1 + "openai_deployment_name": + "since_version": |- + v0.228.1 + "openai_organization": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig: + "palm_api_key": + "since_version": |- + v0.228.1 + "palm_api_key_plaintext": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.RateLimit: + "calls": + "since_version": |- + v0.228.1 + "key": + "since_version": |- + v0.228.1 + "renewal_period": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/serving.Route: "served_entity_name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 + "served_model_name": + "since_version": |- + v0.228.1 + "traffic_percentage": + "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: + "entity_name": + "since_version": |- + v0.228.1 "entity_version": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "environment_vars": + "since_version": |- + v0.228.1 + "external_model": + "since_version": |- + v0.228.1 + "instance_profile_arn": + "since_version": |- + v0.228.1 + "max_provisioned_concurrency": + "since_version": |- + v0.256.0 + "max_provisioned_throughput": + "since_version": |- + v0.228.1 + "min_provisioned_concurrency": + "since_version": |- + v0.256.0 + "min_provisioned_throughput": + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "provisioned_model_units": + "since_version": |- + v0.252.0 + "scale_to_zero_enabled": + "since_version": |- + v0.228.1 + "workload_size": + "since_version": |- + v0.228.1 + "workload_type": 
+ "since_version": |- + v0.228.1 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: + "environment_vars": + "since_version": |- + v0.228.1 + "instance_profile_arn": + "since_version": |- + v0.228.1 + "max_provisioned_concurrency": + "since_version": |- + v0.256.0 + "max_provisioned_throughput": + "since_version": |- + v0.228.1 + "min_provisioned_concurrency": + "since_version": |- + v0.256.0 + "min_provisioned_throughput": + "since_version": |- + v0.228.1 "model_name": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 "model_version": "description": |- PLACEHOLDER + "since_version": |- + v0.228.1 + "name": + "since_version": |- + v0.228.1 + "provisioned_model_units": + "since_version": |- + v0.252.0 + "scale_to_zero_enabled": + "since_version": |- + v0.228.1 + "workload_size": + "since_version": |- + v0.228.1 + "workload_type": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig: + "routes": + "since_version": |- + v0.228.1 +github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation: + "comparison_operator": + "since_version": |- + v0.279.0 + "empty_result_state": + "since_version": |- + v0.279.0 + "notification": + "since_version": |- + v0.279.0 + "source": + "since_version": |- + v0.279.0 + "threshold": + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification: + "notify_on_ok": + "since_version": |- + v0.279.0 + "retrigger_seconds": + "since_version": |- + v0.279.0 "subscriptions": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand: "column": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "value": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn: "aggregation": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "display": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "name": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue: "bool_value": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "double_value": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "string_value": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 +github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs: + "service_principal_name": + "since_version": |- + v0.279.0 + "user_name": + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription: "destination_id": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 "user_email": "description": |- PLACEHOLDER + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.Channel: "dbsql_version": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "name": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 +github.com/databricks/databricks-sdk-go/service/sql.CronSchedule: + "pause_status": + "since_version": |- + v0.279.0 + "quartz_cron_schedule": + "since_version": |- + v0.279.0 + "timezone_id": + "since_version": |- + v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair: "key": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 "value": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 
github.com/databricks/databricks-sdk-go/service/sql.EndpointTags: "custom_tags": "description": |- PLACEHOLDER + "since_version": |- + v0.260.0 +github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata: + "dns_name": + "since_version": |- + v0.252.0 + "resource_id": + "since_version": |- + v0.252.0 diff --git a/bundle/internal/schema/main.go b/bundle/internal/schema/main.go index e0d4d516e0..1b2a320ab5 100644 --- a/bundle/internal/schema/main.go +++ b/bundle/internal/schema/main.go @@ -201,6 +201,11 @@ func generateSchema(workdir, outputFile string) { annotationsOpenApiPath := filepath.Join(workdir, "annotations_openapi.yml") annotationsOpenApiOverridesPath := filepath.Join(workdir, "annotations_openapi_overrides.yml") + // Update since_version annotations based on git version history + if err := updateSinceVersions(workdir); err != nil { + log.Printf("Warning: failed to update since_version annotations: %v", err) + } + // Input file, the databricks openapi spec. inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC") if inputFile != "" { diff --git a/bundle/internal/schema/parser.go b/bundle/internal/schema/parser.go index d72524dc59..70474fea80 100644 --- a/bundle/internal/schema/parser.go +++ b/bundle/internal/schema/parser.go @@ -219,12 +219,10 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid return err } - err = saveYamlWithStyle(overridesPath, overrides) - if err != nil { + if err := overrides.Save(overridesPath); err != nil { return err } - err = saveYamlWithStyle(outputPath, annotations) - if err != nil { + if err := annotations.Save(outputPath); err != nil { return err } err = prependCommentToFile(outputPath, "# This file is auto-generated. DO NOT EDIT.\n") diff --git a/bundle/internal/schema/since_versions.go b/bundle/internal/schema/since_versions.go new file mode 100644 index 0000000000..46508eaf9c --- /dev/null +++ b/bundle/internal/schema/since_versions.go @@ -0,0 +1,387 @@ +package main + +import ( + "encoding/json" + "errors" + "fmt" + "maps" + "os" + "os/exec" + "path/filepath" + "strconv" + "strings" + + "github.com/databricks/cli/bundle/internal/annotation" +) + +// Version when bundle/schema/jsonschema.json was added to the repo. +var embeddedSchemaVersion = [3]int{0, 229, 0} + +// updateSinceVersions updates annotation files with since_version for each field. +// It uses .last_processed_cli_version to track the last processed version and only +// processes new versions incrementally. 
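+// The flow is: load annotations.yml and annotations_openapi_overrides.yml,
+// list the repo's version tags, drop tags at or before the last processed
+// version, compute the first tag in which each schema field appears, filter
+// out fields that no longer exist in the current schema, and write the
+// updated files back together with the new marker version.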
+func updateSinceVersions(workDir string) error { + annotationsPath := filepath.Join(workDir, "annotations.yml") + overridesPath := filepath.Join(workDir, "annotations_openapi_overrides.yml") + previousVersionFile := filepath.Join(workDir, ".last_processed_cli_version") + + // Load existing annotations + annotations, err := annotation.Load(annotationsPath) + if err != nil { + return fmt.Errorf("loading annotations: %w", err) + } + + overrides, err := annotation.Load(overridesPath) + if err != nil { + return fmt.Errorf("loading overrides: %w", err) + } + + // Get all version tags + allVersions, err := getVersionTags() + if err != nil { + return fmt.Errorf("getting version tags: %w", err) + } + if len(allVersions) == 0 { + return errors.New("no version tags found") + } + + // Read previous version to determine where to start + previousVersion := readLastProcessedVersion(previousVersionFile) + versions := filterVersionsAfter(allVersions, previousVersion) + + if len(versions) == 0 { + fmt.Printf("Since versions are up to date (at %s)\n", previousVersion) + return nil + } + + fmt.Printf("Updating since_version annotations from %s to %s (%d versions)\n", + versions[0], versions[len(versions)-1], len(versions)) + + // Compute since versions for the new range + sinceVersions, err := computeSinceVersions(versions) + if err != nil { + return fmt.Errorf("computing since versions: %w", err) + } + + // Get current schema to filter out removed types + currentSchema, err := getSchemaAtVersion(allVersions[len(allVersions)-1]) + if err != nil { + return fmt.Errorf("getting current schema: %w", err) + } + currentFields := flattenSchema(currentSchema) + sinceVersions = filterToCurrentFields(sinceVersions, currentFields) + + // Update annotations + cliAdded := updateAnnotationsWithVersions(annotations, sinceVersions, overrides, true) + sdkAdded := updateAnnotationsWithVersions(overrides, sinceVersions, annotations, false) + + // Save if there were changes + if cliAdded > 0 { + if err := annotations.Save(annotationsPath); err != nil { + return fmt.Errorf("saving annotations: %w", err) + } + fmt.Printf("Added %d since_version entries to annotations.yml\n", cliAdded) + } + + if sdkAdded > 0 { + if err := overrides.Save(overridesPath); err != nil { + return fmt.Errorf("saving overrides: %w", err) + } + fmt.Printf("Added %d since_version entries to annotations_openapi_overrides.yml\n", sdkAdded) + } + + // Save the current version as the new previous version + latestVersion := allVersions[len(allVersions)-1] + if err := os.WriteFile(previousVersionFile, []byte(latestVersion+"\n"), 0o644); err != nil { + return fmt.Errorf("writing previous version file: %w", err) + } + + return nil +} + +// readLastProcessedVersion reads the last processed version from the file. +func readLastProcessedVersion(path string) string { + data, err := os.ReadFile(path) + if err != nil { + return "" + } + return strings.TrimSpace(string(data)) +} + +// filterVersionsAfter returns versions that come after the given version. +func filterVersionsAfter(versions []string, after string) []string { + if after == "" { + return versions + } + + afterParsed, err := parseVersion(after) + if err != nil { + return versions + } + + var result []string + for _, v := range versions { + parsed, err := parseVersion(v) + if err != nil { + continue + } + if compareVersions(parsed, afterParsed) > 0 { + result = append(result, v) + } + } + return result +} + +// parseVersion parses a version tag like "v0.228.0" into [0, 228, 0]. 
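+// The "v" prefix is optional, and anything with fewer than three
+// dot-separated components is rejected. For example:
+//
+//	parseVersion("0.228.0") // [3]int{0, 228, 0}, nil
+//	parseVersion("v0.228")  // error: invalid version tag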
+func parseVersion(tag string) ([3]int, error) { + tag = strings.TrimPrefix(tag, "v") + parts := strings.Split(tag, ".") + if len(parts) < 3 { + return [3]int{}, fmt.Errorf("invalid version tag: %s", tag) + } + var v [3]int + for i := range 3 { + n, err := strconv.Atoi(parts[i]) + if err != nil { + return [3]int{}, fmt.Errorf("invalid version component: %s", parts[i]) + } + v[i] = n + } + return v, nil +} + +// compareVersions returns -1 if a < b, 0 if a == b, 1 if a > b. +func compareVersions(a, b [3]int) int { + for i := range 3 { + if a[i] < b[i] { + return -1 + } + if a[i] > b[i] { + return 1 + } + } + return 0 +} + +// getVersionTags returns sorted list of version tags from git (oldest first). +func getVersionTags() ([]string, error) { + cmd := exec.Command("git", "tag", "--list", "v*", "--sort=version:refname") + output, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("failed to get git tags: %w", err) + } + + var tags []string + for _, line := range strings.Split(string(output), "\n") { + tag := strings.TrimSpace(line) + if tag == "" { + continue + } + v, err := parseVersion(tag) + if err != nil { + continue + } + if compareVersions(v, embeddedSchemaVersion) >= 0 { + tags = append(tags, tag) + } + } + return tags, nil +} + +// getSchemaAtVersion extracts the JSON schema from the embedded file at a given version. +func getSchemaAtVersion(version string) (map[string]any, error) { + cmd := exec.Command("git", "show", version+":bundle/schema/jsonschema.json") + output, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("failed to get schema at %s: %w", version, err) + } + + var schema map[string]any + if err := json.Unmarshal(output, &schema); err != nil { + return nil, fmt.Errorf("failed to parse schema at %s: %w", version, err) + } + return schema, nil +} + +// walkDefs recursively walks $defs to extract type definitions. +func walkDefs(defs map[string]any, prefix string) map[string][]string { + result := make(map[string][]string) + + for key, value := range defs { + valueMap, ok := value.(map[string]any) + if !ok { + continue + } + + currentPath := prefix + if currentPath != "" { + currentPath += "/" + key + } else { + currentPath = key + } + + props, hasProps := valueMap["properties"].(map[string]any) + _, hasOneOf := valueMap["oneOf"] + _, hasAnyOf := valueMap["anyOf"] + + if hasProps || hasOneOf || hasAnyOf { + if !hasProps { + if oneOf, ok := valueMap["oneOf"].([]any); ok { + for _, variant := range oneOf { + if variantMap, ok := variant.(map[string]any); ok { + if p, ok := variantMap["properties"].(map[string]any); ok { + props = p + break + } + } + } + } + if props == nil { + if anyOf, ok := valueMap["anyOf"].([]any); ok { + for _, variant := range anyOf { + if variantMap, ok := variant.(map[string]any); ok { + if p, ok := variantMap["properties"].(map[string]any); ok { + props = p + break + } + } + } + } + } + } + if props != nil { + var propNames []string + for propName := range props { + propNames = append(propNames, propName) + } + result[currentPath] = propNames + } + } else if _, hasType := valueMap["type"]; hasType { + continue + } else { + nested := walkDefs(valueMap, currentPath) + maps.Copy(result, nested) + } + } + return result +} + +// flattenSchema extracts all field paths from a JSON schema. 
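+// Types under $defs contribute "<type path>.<property>" keys, and top-level
+// properties are attributed to config.Root. For example, a $defs entry for
+// config.Bundle with properties "name" and "cluster_id" yields:
+//
+//	github.com/databricks/cli/bundle/config.Bundle.name
+//	github.com/databricks/cli/bundle/config.Bundle.cluster_id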
+func flattenSchema(schema map[string]any) map[string]bool { + fields := make(map[string]bool) + + if defs, ok := schema["$defs"].(map[string]any); ok { + typeDefs := walkDefs(defs, "") + for typePath, propNames := range typeDefs { + for _, propName := range propNames { + fields[typePath+"."+propName] = true + } + } + } + + rootType := "github.com/databricks/cli/bundle/config.Root" + if props, ok := schema["properties"].(map[string]any); ok { + for propName := range props { + fields[rootType+"."+propName] = true + } + } + + return fields +} + +// computeSinceVersions computes when each field was first introduced. +func computeSinceVersions(versions []string) (map[string]map[string]string, error) { + allFields := make(map[string]bool) + sinceVersions := make(map[string]string) + + for _, version := range versions { + schema, err := getSchemaAtVersion(version) + if err != nil { + continue + } + + currentFields := flattenSchema(schema) + for field := range currentFields { + if !allFields[field] { + sinceVersions[field] = version + allFields[field] = true + } + } + } + + result := make(map[string]map[string]string) + for fieldPath, version := range sinceVersions { + lastDot := strings.LastIndex(fieldPath, ".") + if lastDot == -1 { + continue + } + typePath := fieldPath[:lastDot] + propName := fieldPath[lastDot+1:] + + if result[typePath] == nil { + result[typePath] = make(map[string]string) + } + result[typePath][propName] = version + } + + return result, nil +} + +// isBundleCliPath checks if a type path is from the CLI package (not SDK). +func isBundleCliPath(path string) bool { + return strings.HasPrefix(path, "github.com/databricks/cli/") +} + +// filterToCurrentFields removes fields that don't exist in currentFields. +func filterToCurrentFields(sinceVersions map[string]map[string]string, currentFields map[string]bool) map[string]map[string]string { + result := make(map[string]map[string]string) + for typePath, props := range sinceVersions { + for propName, version := range props { + fieldPath := typePath + "." + propName + if currentFields[fieldPath] { + if result[typePath] == nil { + result[typePath] = make(map[string]string) + } + result[typePath][propName] = version + } + } + } + return result +} + +// updateAnnotationsWithVersions updates annotations with since_version. 
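+// Entries are added only on the requested side (CLI types when cliTypes is
+// true, SDK types otherwise); fields already present in skipIfIn or that
+// already have a SinceVersion are left untouched. It returns the number of
+// entries added.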
+func updateAnnotationsWithVersions( + annotations annotation.File, + sinceVersions map[string]map[string]string, + skipIfIn annotation.File, + cliTypes bool, +) int { + added := 0 + for typePath, props := range sinceVersions { + isCli := isBundleCliPath(typePath) + if cliTypes != isCli { + continue + } + + for propName, version := range props { + if skipIfIn[typePath] != nil { + if _, exists := skipIfIn[typePath][propName]; exists { + continue + } + } + + if annotations[typePath] == nil { + annotations[typePath] = make(map[string]annotation.Descriptor) + } + + propData := annotations[typePath][propName] + if propData.SinceVersion == "" { + propData.SinceVersion = version + annotations[typePath][propName] = propData + added++ + } + } + } + + return added +} diff --git a/bundle/internal/schema/since_versions_test.go b/bundle/internal/schema/since_versions_test.go new file mode 100644 index 0000000000..a1c4477fa9 --- /dev/null +++ b/bundle/internal/schema/since_versions_test.go @@ -0,0 +1,302 @@ +package main + +import ( + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle/internal/annotation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParseVersion(t *testing.T) { + tests := []struct { + input string + expected [3]int + wantErr bool + }{ + {"v0.228.0", [3]int{0, 228, 0}, false}, + {"v1.2.3", [3]int{1, 2, 3}, false}, + {"0.228.0", [3]int{0, 228, 0}, false}, + {"v0.228", [3]int{}, true}, + {"invalid", [3]int{}, true}, + } + + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + result, err := parseVersion(tt.input) + if tt.wantErr { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expected, result) + } + }) + } +} + +func TestCompareVersions(t *testing.T) { + tests := []struct { + a, b [3]int + expected int + }{ + {[3]int{0, 228, 0}, [3]int{0, 228, 0}, 0}, + {[3]int{0, 228, 0}, [3]int{0, 229, 0}, -1}, + {[3]int{0, 229, 0}, [3]int{0, 228, 0}, 1}, + {[3]int{1, 0, 0}, [3]int{0, 999, 999}, 1}, + {[3]int{0, 228, 1}, [3]int{0, 228, 0}, 1}, + } + + for _, tt := range tests { + result := compareVersions(tt.a, tt.b) + assert.Equal(t, tt.expected, result) + } +} + +func TestFilterVersionsAfter(t *testing.T) { + versions := []string{"v0.228.0", "v0.229.0", "0.229.1", "v0.230.0", "v1.0.0"} + + t.Run("empty after returns all", func(t *testing.T) { + result := filterVersionsAfter(versions, "") + assert.Equal(t, versions, result) + }) + + t.Run("filters after v0.228.0", func(t *testing.T) { + result := filterVersionsAfter(versions, "v0.228.0") + assert.Equal(t, []string{"v0.229.0", "0.229.1", "v0.230.0", "v1.0.0"}, result) + }) + + t.Run("filters after v0.229.1", func(t *testing.T) { + result := filterVersionsAfter(versions, "v0.229.1") + assert.Equal(t, []string{"v0.230.0", "v1.0.0"}, result) + }) + + t.Run("filters after v0.230.0", func(t *testing.T) { + result := filterVersionsAfter(versions, "v0.230.0") + assert.Equal(t, []string{"v1.0.0"}, result) + }) + + t.Run("returns empty for last version", func(t *testing.T) { + result := filterVersionsAfter(versions, "v1.0.0") + assert.Empty(t, result) + }) + + t.Run("returns empty for future version", func(t *testing.T) { + result := filterVersionsAfter(versions, "v1.0.1") + assert.Empty(t, result) + }) +} + +func TestReadLastProcessedVersion(t *testing.T) { + dir := t.TempDir() + + t.Run("file exists", func(t *testing.T) { + path := filepath.Join(dir, "version1") + err := os.WriteFile(path, []byte("v0.281.0\n"), 0o644) + require.NoError(t, err) 
+ + result := readLastProcessedVersion(path) + assert.Equal(t, "v0.281.0", result) + }) + + t.Run("file does not exist", func(t *testing.T) { + path := filepath.Join(dir, "nonexistent") + result := readLastProcessedVersion(path) + assert.Equal(t, "", result) + }) + + t.Run("file with whitespace", func(t *testing.T) { + path := filepath.Join(dir, "version2") + err := os.WriteFile(path, []byte(" v0.280.0 \n"), 0o644) + require.NoError(t, err) + + result := readLastProcessedVersion(path) + assert.Equal(t, "v0.280.0", result) + }) +} + +func TestIsBundleCliPath(t *testing.T) { + tests := []struct { + path string + expected bool + }{ + {"github.com/databricks/cli/bundle/config.Root", true}, + {"github.com/databricks/cli/libs/dyn.Value", true}, + {"github.com/databricks/databricks-sdk-go/service/jobs.Job", false}, + {"github.com/other/package.Type", false}, + } + + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + result := isBundleCliPath(tt.path) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestFlattenSchema(t *testing.T) { + schema := map[string]any{ + "$defs": map[string]any{ + "github.com": map[string]any{ + "databricks": map[string]any{ + "cli": map[string]any{ + "bundle": map[string]any{ + "config.Bundle": map[string]any{ + "properties": map[string]any{ + "name": map[string]any{"type": "string"}, + "cluster_id": map[string]any{"type": "string"}, + }, + }, + }, + }, + }, + }, + }, + "properties": map[string]any{ + "bundle": map[string]any{}, + "resources": map[string]any{}, + }, + } + + fields := flattenSchema(schema) + + assert.True(t, fields["github.com/databricks/cli/bundle/config.Bundle.name"]) + assert.True(t, fields["github.com/databricks/cli/bundle/config.Bundle.cluster_id"]) + assert.True(t, fields["github.com/databricks/cli/bundle/config.Root.bundle"]) + assert.True(t, fields["github.com/databricks/cli/bundle/config.Root.resources"]) +} + +func TestFilterToCurrentFields(t *testing.T) { + sinceVersions := map[string]map[string]string{ + "type.A": { + "field1": "v0.228.0", + "field2": "v0.229.0", + }, + "type.B": { + "field3": "v0.230.0", + }, + } + + currentFields := map[string]bool{ + "type.A.field1": true, + "type.B.field3": true, + // field2 is not in current schema (was removed) + } + + result := filterToCurrentFields(sinceVersions, currentFields) + + assert.Equal(t, "v0.228.0", result["type.A"]["field1"]) + assert.NotContains(t, result["type.A"], "field2") + assert.Equal(t, "v0.230.0", result["type.B"]["field3"]) +} + +func TestUpdateAnnotationsWithVersions(t *testing.T) { + t.Run("adds new since_version", func(t *testing.T) { + annotations := annotation.File{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": annotation.Descriptor{Description: "The bundle name"}, + }, + } + + sinceVersions := map[string]map[string]string{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": "v0.228.0", + "cluster_id": "v0.229.0", + }, + } + + added := updateAnnotationsWithVersions(annotations, sinceVersions, nil, true) + + assert.Equal(t, 2, added) + assert.Equal(t, "v0.228.0", annotations["github.com/databricks/cli/bundle/config.Bundle"]["name"].SinceVersion) + assert.Equal(t, "v0.229.0", annotations["github.com/databricks/cli/bundle/config.Bundle"]["cluster_id"].SinceVersion) + }) + + t.Run("skips existing since_version", func(t *testing.T) { + annotations := annotation.File{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": annotation.Descriptor{ + Description: "The bundle name", + SinceVersion: "v0.200.0", // Already set 
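+				// updateAnnotationsWithVersions must leave this value untouched.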
+ }, + }, + } + + sinceVersions := map[string]map[string]string{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": "v0.228.0", + }, + } + + added := updateAnnotationsWithVersions(annotations, sinceVersions, nil, true) + + assert.Equal(t, 0, added) + // Should keep the original value + assert.Equal(t, "v0.200.0", annotations["github.com/databricks/cli/bundle/config.Bundle"]["name"].SinceVersion) + }) + + t.Run("filters by CLI types", func(t *testing.T) { + annotations := annotation.File{} + + sinceVersions := map[string]map[string]string{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": "v0.228.0", + }, + "github.com/databricks/databricks-sdk-go/service/jobs.Job": { + "name": "v0.228.0", + }, + } + + // Only CLI types + added := updateAnnotationsWithVersions(annotations, sinceVersions, nil, true) + assert.Equal(t, 1, added) + assert.Contains(t, annotations, "github.com/databricks/cli/bundle/config.Bundle") + assert.NotContains(t, annotations, "github.com/databricks/databricks-sdk-go/service/jobs.Job") + }) + + t.Run("skips if in other file", func(t *testing.T) { + annotations := annotation.File{} + skipIfIn := annotation.File{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": annotation.Descriptor{}, + }, + } + + sinceVersions := map[string]map[string]string{ + "github.com/databricks/cli/bundle/config.Bundle": { + "name": "v0.228.0", + "cluster_id": "v0.229.0", + }, + } + + added := updateAnnotationsWithVersions(annotations, sinceVersions, skipIfIn, true) + + assert.Equal(t, 1, added) // Only cluster_id added, name skipped + assert.NotContains(t, annotations["github.com/databricks/cli/bundle/config.Bundle"], "name") + assert.Equal(t, "v0.229.0", annotations["github.com/databricks/cli/bundle/config.Bundle"]["cluster_id"].SinceVersion) + }) +} + +func TestWalkDefs(t *testing.T) { + defs := map[string]any{ + "github.com": map[string]any{ + "databricks": map[string]any{ + "cli": map[string]any{ + "bundle": map[string]any{ + "config.Bundle": map[string]any{ + "properties": map[string]any{ + "name": map[string]any{"type": "string"}, + }, + }, + }, + }, + }, + }, + } + + result := walkDefs(defs, "") + + assert.Contains(t, result, "github.com/databricks/cli/bundle/config.Bundle") + assert.Contains(t, result["github.com/databricks/cli/bundle/config.Bundle"], "name") +} diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 41cb0fa591..d5d29d5c9b 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -65,13 +65,16 @@ "type": "object", "properties": { "custom_description": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "custom_summary": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "display_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "evaluation": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation" @@ -80,27 +83,32 @@ "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" }, "parent_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AlertPermission" }, "query_text": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "run_as": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs" + "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs", + "x-since-version": "v0.279.0" }, "run_as_user_name": { "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.279.0", "deprecated": true }, "schedule": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CronSchedule" }, "warehouse_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -124,16 +132,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel", + "x-since-version": "v0.279.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -177,7 +189,8 @@ }, "description": { "description": "The description of the app.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -185,14 +198,16 @@ }, "name": { "description": "The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AppPermission" }, "resources": { "description": "Resources for the app.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource", + "x-since-version": "v0.239.0" }, "source_code_path": { "$ref": "#/$defs/string" @@ -219,16 +234,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AppPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AppPermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -265,35 +284,43 @@ "properties": { "apply_policy_default_values": { "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. 
When set to false, only fixed values from the policy will be applied.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale", + "x-since-version": "v0.229.0" }, "autotermination_minutes": { "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", + "x-since-version": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", + "x-since-version": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", + "x-since-version": "v0.229.0" }, "cluster_name": { "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. 
Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
-                "$ref": "#/$defs/map/string"
+                "$ref": "#/$defs/map/string",
+                "x-since-version": "v0.229.0"
             },
             "data_security_mode": {
                 "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode"
             },
@@ -303,35 +330,43 @@
             },
             "driver_instance_pool_id": {
                 "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "driver_node_type_id": {
                 "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "enable_elastic_disk": {
                 "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.",
-                "$ref": "#/$defs/bool"
+                "$ref": "#/$defs/bool",
+                "x-since-version": "v0.229.0"
             },
             "enable_local_disk_encryption": {
                 "description": "Whether to enable LUKS on cluster VMs' local disks",
-                "$ref": "#/$defs/bool"
+                "$ref": "#/$defs/bool",
+                "x-since-version": "v0.229.0"
             },
             "gcp_attributes": {
                 "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
-                "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes"
+                "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes",
+                "x-since-version": "v0.229.0"
             },
             "init_scripts": {
                 "description": "The configuration for storing init scripts. Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
-                "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo"
+                "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo",
+                "x-since-version": "v0.229.0"
             },
             "instance_pool_id": {
                 "description": "The optional ID of the instance pool to which the cluster belongs.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "is_single_node": {
                 "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`",
-                "$ref": "#/$defs/bool"
+                "$ref": "#/$defs/bool",
+                "x-since-version": "v0.237.0"
             },
             "kind": {
                 "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind"
             },
@@ -342,53 +377,64 @@
             },
             "node_type_id": {
                 "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "num_workers": {
                 "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
-                "$ref": "#/$defs/int"
+                "$ref": "#/$defs/int",
+                "x-since-version": "v0.229.0"
             },
             "permissions": {
                 "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ClusterPermission"
             },
             "policy_id": {
                 "description": "The ID of the cluster policy used to create the cluster if applicable.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "remote_disk_throughput": {
                 "description": "If set, the configurable throughput (in Mb/s) for the remote disk. Currently only supported for GCP HYPERDISK_BALANCED disks.",
-                "$ref": "#/$defs/int"
+                "$ref": "#/$defs/int",
+                "x-since-version": "v0.257.0"
             },
             "runtime_engine": {
                 "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine"
             },
             "single_user_name": {
                 "description": "Single user name if data_security_mode is `SINGLE_USER`",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "spark_conf": {
                 "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.",
-                "$ref": "#/$defs/map/string"
+                "$ref": "#/$defs/map/string",
+                "x-since-version": "v0.229.0"
             },
             "spark_env_vars": {
                 "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
-                "$ref": "#/$defs/map/string"
+                "$ref": "#/$defs/map/string",
+                "x-since-version": "v0.229.0"
             },
             "spark_version": {
                 "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.229.0"
             },
             "ssh_public_keys": {
                 "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
-                "$ref": "#/$defs/slice/string"
+                "$ref": "#/$defs/slice/string",
+                "x-since-version": "v0.229.0"
             },
             "total_initial_remote_disk_size": {
                 "description": "If set, the total initial volume size (in GB) of the remote disks. Currently only supported for GCP HYPERDISK_BALANCED disks.",
-                "$ref": "#/$defs/int"
+                "$ref": "#/$defs/int",
+                "x-since-version": "v0.257.0"
             },
             "use_ml_runtime": {
                 "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.",
-                "$ref": "#/$defs/bool"
+                "$ref": "#/$defs/bool",
+                "x-since-version": "v0.237.0"
             },
             "workload_type": {
                 "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
@@ -409,16 +455,20 @@
                 "type": "object",
                 "properties": {
                     "group_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                     },
                     "level": {
-                        "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ClusterPermissionLevel"
+                        "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ClusterPermissionLevel",
+                        "x-since-version": "v0.247.0"
                     },
                     "service_principal_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                    },
                     "user_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                     }
                 },
                 "additionalProperties": false,
@@ -463,11 +513,13 @@
                 },
                 "dataset_catalog": {
                     "description": "Sets the default catalog for all datasets in this dashboard. When set, this overrides the catalog specified in individual dataset definitions.",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.281.0"
                 },
                 "dataset_schema": {
                     "description": "Sets the default schema for all datasets in this dashboard. When set, this overrides the schema specified in individual dataset definitions.",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.281.0"
                 },
                 "display_name": {
                     "description": "The display name of the dashboard.",
@@ -530,16 +582,20 @@
                 "type": "object",
                 "properties": {
                     "group_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                     },
                     "level": {
-                        "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DashboardPermissionLevel"
+                        "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DashboardPermissionLevel",
+                        "x-since-version": "v0.247.0"
                     },
                     "service_principal_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                     },
                     "user_name": {
-                        "$ref": "#/$defs/string"
+                        "$ref": "#/$defs/string",
+                        "x-since-version": "v0.247.0"
                     }
                 },
                 "additionalProperties": false,
@@ -580,11 +636,13 @@
                 },
                 "database_instance_name": {
                     "description": "The name of the DatabaseInstance housing the database.",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.265.0"
                 },
                 "database_name": {
                     "description": "The name of the database (in an instance) associated with the catalog.",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.265.0"
                 },
                 "lifecycle": {
                     "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -592,7 +650,8 @@
             },
             "name": {
                 "description": "The name of the catalog in UC.",
-                "$ref": "#/$defs/string"
+                "$ref": "#/$defs/string",
+                "x-since-version": "v0.265.0"
             }
         },
         "additionalProperties": false,
@@ -616,19 +675,23 @@
             "properties": {
                 "capacity": {
                     "description": "The sku of the instance. Valid values are \"CU_1\", \"CU_2\", \"CU_4\", \"CU_8\".",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.265.0"
                 },
                 "custom_tags": {
                     "description": "Custom tags associated with the instance. This field is only included on create and update responses.",
-                    "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/database.CustomTag"
+                    "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/database.CustomTag",
+                    "x-since-version": "v0.273.0"
                 },
                 "enable_pg_native_login": {
                     "description": "Whether to enable PG native password login on the instance. Defaults to false.",
-                    "$ref": "#/$defs/bool"
+                    "$ref": "#/$defs/bool",
+                    "x-since-version": "v0.267.0"
                 },
                 "enable_readable_secondaries": {
                     "description": "Whether to enable secondaries to serve read-only traffic. Defaults to false.",
-                    "$ref": "#/$defs/bool"
+                    "$ref": "#/$defs/bool",
+                    "x-since-version": "v0.265.0"
                 },
                 "lifecycle": {
                     "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -636,30 +699,36 @@
                 },
                 "name": {
                     "description": "The name of the instance. This is the unique identifier for the instance.",
-                    "$ref": "#/$defs/string"
+                    "$ref": "#/$defs/string",
+                    "x-since-version": "v0.265.0"
                 },
                 "node_count": {
                     "description": "The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to\n1 primary and 0 secondaries. This field is input only, see effective_node_count for the output.",
-                    "$ref": "#/$defs/int"
+                    "$ref": "#/$defs/int",
+                    "x-since-version": "v0.265.0"
                 },
                 "parent_instance_ref": {
                     "description": "The ref of the parent instance. This is only available if the instance is a\nchild instance.\nInput: For specifying the parent instance to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.",
-                    "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef"
+                    "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef",
+                    "x-since-version": "v0.265.0"
                 },
                 "permissions": {
                     "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission"
                 },
                 "retention_window_in_days": {
                     "description": "The retention window for the instance. This is the time window in days\nfor which the historical data is retained. The default value is 7 days.\nValid values are 2 to 35 days.",
-                    "$ref": "#/$defs/int"
+                    "$ref": "#/$defs/int",
+                    "x-since-version": "v0.265.0"
                 },
                 "stopped": {
                     "description": "Whether to stop the instance. 
An input only param, see effective_stopped for the output.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.265.0" }, "usage_policy_id": { "description": "The desired usage policy to associate with the instance.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" } }, "additionalProperties": false, @@ -679,16 +748,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermissionLevel", + "x-since-version": "v0.265.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" } }, "additionalProperties": false, @@ -725,11 +798,13 @@ "properties": { "principal": { "description": "The name of the principal that will be granted privileges", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "privileges": { "description": "The privileges to grant to the specified entity", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -751,34 +826,41 @@ "properties": { "budget_policy_id": { "description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.231.0" }, "continuous": { "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous", + "x-since-version": "v0.228.1" }, "description": { "description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications", + "x-since-version": "v0.228.1" }, "environments": { "description": "A list of task execution environment specifications that can be referenced by serverless tasks of this job.\nFor serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. 
If a jobs environment is specified, it will override the notebook environment.\nFor other serverless tasks, the task environment is required to be specified using environment_key in the task settings.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment", + "x-since-version": "v0.228.1" }, "git_source": { "description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource", + "x-since-version": "v0.228.1" }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules" }, "job_clusters": { "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster", + "x-since-version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -786,63 +868,76 @@ }, "max_concurrent_runs": { "description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "name": { "description": "An optional name for the job. 
The maximum length is 4096 bytes in UTF-8 encoding.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings", + "x-since-version": "v0.228.1" }, "parameters": { "description": "Job-level parameter definitions", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition", + "x-since-version": "v0.228.1" }, "performance_target": { "description": "The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.\nThe performance target does not apply to tasks that run on Serverless GPU compute.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PerformanceTarget" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PerformanceTarget", + "x-since-version": "v0.241.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.JobPermission" }, "queue": { "description": "The queue settings of the job.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings", + "x-since-version": "v0.228.1" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs" }, "schedule": { "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule", + "x-since-version": "v0.228.1" }, "tags": { "description": "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "tasks": { "description": "A list of task specifications to be executed by this job.\nIt supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit).\nRead endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task", + "x-since-version": "v0.228.1" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job. 
A value of `0` means no timeout.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "trigger": { "description": "A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings", + "x-since-version": "v0.228.1" }, "usage_policy_id": { "description": "The id of the user specified usage policy to use for this job.\nIf not specified, a default usage policy may be applied when creating or modifying the job.\nSee `effective_usage_policy_id` for the usage policy used by this workload.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.265.0" }, "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this job begin or complete.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -860,16 +955,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.JobPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.JobPermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -907,7 +1006,8 @@ "properties": { "prevent_destroy": { "description": "Lifecycle setting to prevent the resource from being destroyed.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.268.0" } }, "additionalProperties": false @@ -925,7 +1025,8 @@ "properties": { "artifact_location": { "description": "Location where all artifacts for the experiment are stored.\nIf not provided, the remote server will select an appropriate default.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -933,14 +1034,16 @@ }, "name": { "description": "Experiment name.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission" }, "tags": { "description": "A collection of tags to set on the experiment. Maximum tag size and number of tags per request\ndepends on the storage backend. All storage backends are guaranteed to support tag keys up\nto 250 bytes in size and tag values up to 5000 bytes in size. 
All storage backends are also\nguaranteed to support up to 20 tags per request.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -961,16 +1064,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -1007,7 +1114,8 @@ "properties": { "description": { "description": "Optional description for registered model.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1015,14 +1123,16 @@ }, "name": { "description": "Register models under this name", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowModelPermission" }, "tags": { "description": "Additional metadata for registered model.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -1043,16 +1153,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModelPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModelPermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -1091,22 +1205,26 @@ "properties": { "ai_gateway": { "description": "The AI Gateway configuration for the serving endpoint. 
NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig", + "x-since-version": "v0.230.0" }, "budget_policy_id": { "description": "The budget policy to be applied to the serving endpoint.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.244.0" }, "config": { "description": "The core config of the serving endpoint.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput", + "x-since-version": "v0.228.1" }, "description": { "$ref": "#/$defs/string" }, "email_notifications": { "description": "Email notification settings.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications", + "x-since-version": "v0.264.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1114,7 +1232,8 @@ }, "name": { "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission" @@ -1123,15 +1242,18 @@ "description": "Rate limits to be applied to the serving endpoint. 
NOTE: this field is deprecated, please use AI Gateway to manage rate limits.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.RateLimit", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "route_optimized": { "description": "Enable route optimization for the serving endpoint.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "tags": { "description": "Tags to be attached to the serving endpoint and automatically propagated to billing logs.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -1152,16 +1274,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -1198,19 +1324,23 @@ "properties": { "group_name": { "description": "The name of the group that has the permission set in level.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "level": { "description": "The allowed permission for user, group, service principal defined for this permission.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "service_principal_name": { "description": "The name of the service principal that has the permission set in level.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "user_name": { "description": "The name of the user that has the permission set in level.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -1231,69 +1361,85 @@ "properties": { "allow_duplicate_names": { "description": "If false, deployment will fail if name conflicts with that of another pipeline.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.261.0" }, "budget_policy_id": { "description": "Budget policy of this pipeline.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" }, "catalog": { "description": "A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). 
If `target` is not specified, no data is published to Unity Catalog.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "channel": { "description": "DLT Release Channel that specifies which version to use.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "clusters": { "description": "Cluster settings for this pipeline deployment.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster", + "x-since-version": "v0.228.1" }, "configuration": { "description": "String-String configuration for this pipeline execution.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "continuous": { "description": "Whether the pipeline is continuous or triggered. This replaces `trigger`.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "development": { "description": "Whether the pipeline is in Development mode. Defaults to false.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "edition": { "description": "Pipeline product edition.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "environment": { "description": "Environment specification for this pipeline used to install dependencies.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment", + "x-since-version": "v0.257.0" }, "event_log": { "description": "Event log configuration for this pipeline", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec", + "x-since-version": "v0.246.0" }, "filters": { "description": "Filters on which Pipeline packages to include in the deployed graph.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters", + "x-since-version": "v0.228.1" }, "gateway_definition": { "description": "The definition of a gateway pipeline to support change data capture.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "id": { "description": "Unique identifier for this pipeline.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "ingestion_definition": { "description": "The configuration for a managed ingestion pipeline. 
These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition", + "x-since-version": "v0.228.1" }, "libraries": { "description": "Libraries or code needed by this deployment.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary", + "x-since-version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1301,52 +1447,62 @@ }, "name": { "description": "Friendly identifier for this pipeline.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "notifications": { "description": "List of notification settings for this pipeline.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications", + "x-since-version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.PipelinePermission" }, "photon": { "description": "Whether Photon is enabled for this pipeline.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "restart_window": { "description": "Restart window of this pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.234.0" }, "root_path": { "description": "Root path for this pipeline.\nThis is used as the root directory when editing the pipeline in the Databricks user interface and it is\nadded to sys.path when executing Python sources during pipeline execution.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.253.0" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs" }, "schema": { "description": "The default schema (database) where tables are read from or published to.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" }, "serverless": { "description": "Whether serverless compute is enabled for this pipeline.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "storage": { "description": "DBFS root directory for storing checkpoints and tables.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "tags": { "description": "A map of tags associated with the pipeline.\nThese are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations.\nA maximum of 25 tags can be added to the pipeline.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.256.0" }, "target": { "description": "Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. 
This legacy field is deprecated for pipeline creation in favor of the `schema` field.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "trigger": { @@ -1359,7 +1515,8 @@ "description": "Usage policy of this pipeline.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.276.0" } }, "additionalProperties": false, @@ -1377,16 +1534,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.PipelinePermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.PipelinePermissionLevel", + "x-since-version": "v0.247.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.247.0" } }, "additionalProperties": false, @@ -1424,28 +1585,33 @@ "properties": { "assets_dir": { "description": "[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "baseline_table_name": { "description": "[Create:OPT Update:OPT] Baseline table name.\nBaseline data is used to compute drift from the data in the monitored `table_name`.\nThe baseline table and the monitored table shall have the same schema.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "custom_metrics": { "description": "[Create:OPT Update:OPT] Custom metrics.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric", + "x-since-version": "v0.228.1" }, "data_classification_config": { "description": "[Create:OPT Update:OPT] Data classification related config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "inference_log": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog" }, "latest_monitor_failure_msg": { "description": "[Create:ERR Update:IGN] The latest error message for a monitor failure.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.264.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1453,38 +1619,46 @@ }, "notifications": { "description": "[Create:OPT Update:OPT] Field for specifying notification settings.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications", + "x-since-version": "v0.228.1" }, "output_schema_name": { "description": "[Create:REQ Update:REQ] Schema where output tables are created. 
Needs to be in 2-level format {catalog}.{schema}", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "schedule": { "description": "[Create:OPT Update:OPT] The monitor schedule.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule", + "x-since-version": "v0.228.1" }, "skip_builtin_dashboard": { "description": "Whether to skip creating a default dashboard summarizing data quality metrics.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "slicing_exprs": { "description": "[Create:OPT Update:OPT] List of column expressions to slice data with for targeted analysis. The data is grouped by\neach expression independently, resulting in a separate slice for each predicate and its\ncomplements. For example `slicing_exprs=[“col_1”, “col_2 \u003e 10”]` will generate the following\nslices: two slices for `col_2 \u003e 10` (True and False), and one slice per unique value in\n`col_1`. For high-cardinality columns, only the top 100 unique values by frequency will\ngenerate slices.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "snapshot": { "description": "Configuration for monitoring snapshot tables.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot", + "x-since-version": "v0.228.1" }, "table_name": { "$ref": "#/$defs/string" }, "time_series": { "description": "Configuration for monitoring time series tables.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries", + "x-since-version": "v0.228.1" }, "warehouse_id": { "description": "Optional argument to specify the warehouse for dashboard creation. 
If not specified, the first running\nwarehouse will be used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -1514,11 +1688,13 @@ }, "catalog_name": { "description": "The name of the catalog where the schema and the registered model reside", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "comment": { "description": "The comment attached to the registered model", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "created_at": { "$ref": "#/$defs/int64" @@ -1541,18 +1717,21 @@ }, "name": { "description": "The name of the registered model", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "owner": { "$ref": "#/$defs/string" }, "schema_name": { "description": "The name of the schema where the registered model resides", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "storage_location": { "description": "The storage location on the cloud under which model version data files are stored", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "updated_at": { "$ref": "#/$defs/int64" @@ -1577,11 +1756,13 @@ "properties": { "catalog_name": { "description": "Name of parent catalog.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "comment": { "description": "User-provided free-form text description.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant" @@ -1592,14 +1773,16 @@ }, "name": { "description": "Name of schema, relative to parent catalog.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "properties": { "$ref": "#/$defs/map/string" }, "storage_root": { "description": "Storage root URL for managed tables within schema.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -1621,10 +1804,12 @@ "type": "object", "properties": { "principal": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.267.0" }, "privileges": { - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrantPrivilege" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrantPrivilege", + "x-since-version": "v0.267.0" } }, "additionalProperties": false, @@ -1672,23 +1857,28 @@ "properties": { "backend_type": { "description": "The backend type the scope will be created with. If not specified, will default to `DATABRICKS`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType", + "x-since-version": "v0.252.0" }, "keyvault_metadata": { "description": "The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata", + "x-since-version": "v0.252.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle", + "x-since-version": "v0.268.0" }, "name": { "description": "Scope name requested by the user. Scope names are unique.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" }, "permissions": { "description": "The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs.", - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission", + "x-since-version": "v0.252.0" } }, "additionalProperties": false, @@ -1709,19 +1899,23 @@ "properties": { "group_name": { "description": "The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" }, "level": { "description": "The allowed permission for user, group, service principal defined for this permission.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel", + "x-since-version": "v0.252.0" }, "service_principal_name": { "description": "The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" }, "user_name": { "description": "The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" } }, "additionalProperties": false, @@ -1759,19 +1953,23 @@ "properties": { "auto_stop_mins": { "description": "The amount of time in minutes that a SQL warehouse must be idle (i.e., no\nRUNNING queries) before it is automatically stopped.\n\nSupported values:\n- Must be == 0 or \u003e= 10 mins\n- 0 indicates no autostop.\n\nDefaults to 120 mins", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.260.0" }, "channel": { "description": "Channel Details", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Channel" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Channel", + "x-since-version": "v0.260.0" }, "cluster_size": { "description": "Size of the clusters allocated for this warehouse.\nIncreasing the size of a spark cluster allows you to run larger queries on\nit. 
If you want to increase the number of concurrent queries, please tune\nmax_num_clusters.\n\nSupported values:\n- 2X-Small\n- X-Small\n- Small\n- Medium\n- Large\n- X-Large\n- 2X-Large\n- 3X-Large\n- 4X-Large", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "creator_name": { "description": "warehouse creator name", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "enable_photon": { "description": "Configures whether the warehouse should use Photon optimized clusters.\n\nDefaults to true.", @@ -1779,12 +1977,14 @@ }, "enable_serverless_compute": { "description": "Configures whether the warehouse should use serverless compute", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.260.0" }, "instance_profile_arn": { "description": "Deprecated. Instance profile used to pass IAM role to the cluster", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.260.0", "deprecated": true }, "lifecycle": { @@ -1793,15 +1993,18 @@ }, "max_num_clusters": { "description": "Maximum number of clusters that the autoscaler will create to handle\nconcurrent queries.\n\nSupported values:\n- Must be \u003e= min_num_clusters\n- Must be \u003c= 40.\n\nDefaults to min_clusters if unset.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.260.0" }, "min_num_clusters": { "description": "Minimum number of available clusters that will be maintained for this SQL\nwarehouse. Increasing this will ensure that a larger number of clusters are\nalways running and therefore may reduce the cold start time for new\nqueries. This is similar to reserved vs. revocable cores in a resource\nmanager.\n\nSupported values:\n- Must be \u003e 0\n- Must be \u003c= min(max_num_clusters, 30)\n\nDefaults to 1", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.260.0" }, "name": { "description": "Logical name for the cluster.\n\nSupported values:\n- Must be unique within an org.\n- Must be less than 100 characters.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission" @@ -1811,7 +2014,8 @@ }, "tags": { "description": "A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated\nwith this SQL warehouse.\n\nSupported values:\n- Number of tags \u003c 45.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.EndpointTags" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.EndpointTags", + "x-since-version": "v0.260.0" }, "warehouse_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType" @@ -1831,16 +2035,20 @@ "type": "object", "properties": { "group_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "level": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermissionLevel" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermissionLevel", + "x-since-version": "v0.260.0" }, "service_principal_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" } }, "additionalProperties": false, @@ -1878,20 
+2086,25 @@ "description": "Next field marker: 18", "properties": { "database_instance_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle", + "x-since-version": "v0.268.0" }, "logical_database_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "spec": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec", + "x-since-version": "v0.266.0" } }, "additionalProperties": false, @@ -1912,11 +2125,13 @@ "properties": { "catalog_name": { "description": "The name of the catalog where the schema and the volume are", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "comment": { "description": "The comment attached to the volume", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrant" @@ -1927,15 +2142,18 @@ }, "name": { "description": "The name of the volume", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "schema_name": { "description": "The name of the schema where the volume is", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "storage_location": { "description": "The storage location on the cloud", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "volume_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.VolumeType" @@ -1961,10 +2179,12 @@ "type": "object", "properties": { "principal": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.264.1" }, "privileges": { - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrantPrivilege" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrantPrivilege", + "x-since-version": "v0.264.1" } }, "additionalProperties": false, @@ -2004,51 +2224,63 @@ "properties": { "alert": { "description": "The name of the alert for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "cluster": { "description": "The name of the cluster for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "cluster_policy": { "description": "The name of the cluster_policy for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "dashboard": { "description": "The name of the dashboard for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "instance_pool": { "description": "The name of the instance_pool for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "job": { "description": "The name of the job 
for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "metastore": { "description": "The name of the metastore for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "notification_destination": { "description": "The name of the notification_destination for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" }, "pipeline": { "description": "The name of the pipeline for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "query": { "description": "The name of the query for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "service_principal": { "description": "The name of the service_principal for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "warehouse": { "description": "The name of the warehouse for which to retrieve an ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2066,19 +2298,23 @@ "properties": { "default": { "description": "The default value for the variable.", - "$ref": "#/$defs/interface" + "$ref": "#/$defs/interface", + "x-since-version": "v0.228.1" }, "description": { "description": "The description of the variable.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", + "x-since-version": "v0.228.1" }, "type": { "description": "The type of the variable.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2092,20 +2328,24 @@ "properties": { "default": { "description": "The default value for the variable.", - "$ref": "#/$defs/interface" + "$ref": "#/$defs/interface", + "x-since-version": "v0.228.1" }, "description": { "description": "The description of the variable", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", - "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID." 
+ "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.", + "x-since-version": "v0.228.1" }, "type": { "description": "The type of the variable.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -2122,28 +2362,34 @@ "properties": { "build": { "description": "An optional set of build commands to run locally before deployment.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "dynamic_version": { "description": "Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts).", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.245.0" }, "executable": { "description": "The executable type. Valid values are `bash`, `sh`, and `cmd`.", - "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType" + "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType", + "x-since-version": "v0.228.1" }, "files": { "description": "The relative or absolute path to the built artifact files.", - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile", + "x-since-version": "v0.228.1" }, "path": { "description": "The local path of the directory for the artifact.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "type": { "description": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType", - "markdownDescription": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`." + "markdownDescription": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2161,7 +2407,8 @@ "properties": { "source": { "description": "Required. The artifact source file.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -2186,34 +2433,41 @@ "cluster_id": { "description": "The ID of a cluster to use to run the bundle.", "$ref": "#/$defs/string", - "markdownDescription": "The ID of a cluster to use to run the bundle. See [cluster_id](https://docs.databricks.com/dev-tools/bundles/settings.html#cluster_id)." + "markdownDescription": "The ID of a cluster to use to run the bundle. See [cluster_id](https://docs.databricks.com/dev-tools/bundles/settings.html#cluster_id).", + "x-since-version": "v0.229.0" }, "compute_id": { "description": "Deprecated. 
The ID of the compute to use to run the bundle.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "databricks_cli_version": { "description": "The Databricks CLI version to use for the bundle.", "$ref": "#/$defs/string", - "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version)." + "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version).", + "x-since-version": "v0.228.1" }, "deployment": { "description": "The definition of the bundle deployment", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment", - "markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)." + "markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", + "x-since-version": "v0.228.1" }, "git": { "description": "The Git version control details that are associated with your bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", - "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." + "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", + "x-since-version": "v0.228.1" }, "name": { "description": "The name of the bundle.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "uuid": { "description": "Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command).", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.236.0" } }, "additionalProperties": false, @@ -2237,11 +2491,13 @@ "properties": { "fail_on_active_runs": { "description": "Whether to fail on active runs. 
If this is set to true, a deployment that is running can be interrupted.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "lock": { "description": "The deployment lock attributes.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2261,31 +2517,38 @@ "description": "The PyDABs configuration.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs", "deprecationMessage": "Deprecated: please use python instead", + "x-since-version": "v0.228.1", "deprecated": true }, "python": { "description": "Configures loading of Python code defined with the 'databricks-bundles' package.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", + "x-since-version": "v0.238.0" }, "python_wheel_wrapper": { "description": "Whether to use a Python wheel wrapper.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "scripts": { "description": "The commands to run.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command", + "x-since-version": "v0.228.1" }, "skip_artifact_cleanup": { "description": "Determines whether to skip cleaning up the .internal folder", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.254.0" }, "skip_name_prefix_for_schema": { "description": "Skip adding the prefix that is either set in `presets.name_prefix` or computed when `mode: development`\nis set, to the names of UC schemas defined in the bundle.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.255.0" }, "use_legacy_run_as": { "description": "Whether to use the legacy run_as behavior.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2304,12 +2567,14 @@ "branch": { "description": "The Git branch name.", "$ref": "#/$defs/string", - "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)." + "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", + "x-since-version": "v0.228.1" }, "origin_url": { "description": "The origin URL of the repository.", "$ref": "#/$defs/string", - "markdownDescription": "The origin URL of the repository. 
See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2327,11 +2592,13 @@ "properties": { "enabled": { "description": "Whether this lock is enabled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "force": { "description": "Whether to force this lock if it is enabled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2352,31 +2619,38 @@ "properties": { "artifacts_dynamic_version": { "description": "Whether to enable dynamic_version on all artifacts.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.256.0" }, "jobs_max_concurrent_runs": { "description": "The maximum concurrent runs for a job.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "name_prefix": { "description": "The prefix for job runs of the bundle.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "pipelines_development": { "description": "Whether pipeline deployments should be locked in development mode.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "source_linked_deployment": { "description": "Whether to link the deployment to the bundle source.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.236.0" }, "tags": { "description": "The tags for the bundle deployment.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "trigger_pause_status": { "description": "A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2394,7 +2668,8 @@ "properties": { "enabled": { "description": "Whether or not PyDABs (Private Preview) is enabled", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2412,15 +2687,18 @@ "properties": { "mutators": { "description": "Mutators contains a list of fully qualified function paths to mutator functions.\n\nExample: [\"my_project.mutators:add_default_cluster\"]", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.238.0" }, "resources": { "description": "Resources contains a list of fully qualified function paths to load resources\ndefined in Python code.\n\nExample: [\"my_project.resources:load_resources\"]", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.238.0" }, "venv_path": { "description": "VEnvPath is the path to the virtual environment.\n\nIf enabled, Python code will execute within this environment. 
If disabled,\nit defaults to using the Python interpreter available in the current shell.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.238.0" } }, "additionalProperties": false @@ -2437,86 +2715,104 @@ "type": "object", "properties": { "alerts": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Alert" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Alert", + "x-since-version": "v0.279.0" }, "apps": { "description": "The app resource defines a Databricks app.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App", - "markdownDescription": "The app resource defines a [Databricks app](https://docs.databricks.com/api/workspace/apps/create). For information about Databricks Apps, see [link](https://docs.databricks.com/dev-tools/databricks-apps/index.html)." + "markdownDescription": "The app resource defines a [Databricks app](https://docs.databricks.com/api/workspace/apps/create). For information about Databricks Apps, see [link](https://docs.databricks.com/dev-tools/databricks-apps/index.html).", + "x-since-version": "v0.239.0" }, "clusters": { "description": "The cluster definitions for the bundle, where each key is the name of a cluster.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster", - "markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters)." + "markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters).", + "x-since-version": "v0.229.0" }, "dashboards": { "description": "The dashboard definitions for the bundle, where each key is the name of the dashboard.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard", - "markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards)." + "markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards).", + "x-since-version": "v0.232.0" }, "database_catalogs": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseCatalog" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseCatalog", + "x-since-version": "v0.265.0" }, "database_instances": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseInstance" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseInstance", + "x-since-version": "v0.265.0" }, "experiments": { "description": "The experiment definitions for the bundle, where each key is the name of the experiment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment", - "markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments)." + "markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. 
See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments).", + "x-since-version": "v0.228.1" }, "jobs": { "description": "The job definitions for the bundle, where each key is the name of the job.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job", - "markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs)." + "markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs).", + "x-since-version": "v0.228.1" }, "model_serving_endpoints": { "description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint", - "markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints)." + "markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints).", + "x-since-version": "v0.228.1" }, "models": { "description": "The model definitions for the bundle, where each key is the name of the model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel", - "markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models)." + "markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models).", + "x-since-version": "v0.228.1" }, "pipelines": { "description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline", - "markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines)." + "markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines).", + "x-since-version": "v0.228.1" }, "quality_monitors": { "description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor", - "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)." + "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. 
See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors).", + "x-since-version": "v0.228.1" }, "registered_models": { "description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel", - "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)" + "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)", + "x-since-version": "v0.228.1" }, "schemas": { "description": "The schema definitions for the bundle, where each key is the name of the schema.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema", - "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas)." + "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas).", + "x-since-version": "v0.228.1" }, "secret_scopes": { "description": "The secret scope definitions for the bundle, where each key is the name of the secret scope.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SecretScope", - "markdownDescription": "The secret scope definitions for the bundle, where each key is the name of the secret scope. See [secret_scopes](https://docs.databricks.com/dev-tools/bundles/resources.html#secret_scopes)." + "markdownDescription": "The secret scope definitions for the bundle, where each key is the name of the secret scope. See [secret_scopes](https://docs.databricks.com/dev-tools/bundles/resources.html#secret_scopes).", + "x-since-version": "v0.252.0" }, "sql_warehouses": { "description": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SqlWarehouse", - "markdownDescription": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. See [sql_warehouses](https://docs.databricks.com/dev-tools/bundles/resources.html#sql_warehouses)." + "markdownDescription": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. See [sql_warehouses](https://docs.databricks.com/dev-tools/bundles/resources.html#sql_warehouses).", + "x-since-version": "v0.260.0" }, "synced_database_tables": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable", + "x-since-version": "v0.266.0" }, "volumes": { "description": "The volume definitions for the bundle, where each key is the name of the volume.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume", - "markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes)." 
+ "markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes).", + "x-since-version": "v0.236.0" } }, "additionalProperties": false @@ -2533,7 +2829,8 @@ "type": "object", "properties": { "content": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.259.0" } }, "additionalProperties": false, @@ -2554,15 +2851,18 @@ "properties": { "exclude": { "description": "A list of files or folders to exclude from the bundle.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "include": { "description": "A list of files or folders to include in the bundle.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "paths": { "description": "The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2580,63 +2880,77 @@ "properties": { "artifacts": { "description": "The artifacts to include in the target deployment.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", + "x-since-version": "v0.228.1" }, "bundle": { "description": "The bundle attributes when deploying to this target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", + "x-since-version": "v0.228.1" }, "cluster_id": { "description": "The ID of the cluster to use for this target.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.229.0" }, "compute_id": { "description": "Deprecated. The ID of the compute to use for this target.", "$ref": "#/$defs/string", "deprecationMessage": "Deprecated: please use cluster_id instead", + "x-since-version": "v0.228.1", "deprecated": true }, "default": { "description": "Whether this target is the default target.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "git": { "description": "The Git version control settings for the target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", + "x-since-version": "v0.228.1" }, "mode": { "description": "The deployment mode for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", - "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)." + "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. 
See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", + "x-since-version": "v0.228.1" }, "permissions": { "description": "The permissions for deploying and running the bundle in the target.", - "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission" + "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", + "x-since-version": "v0.228.1" }, "presets": { "description": "The deployment presets for the target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", + "x-since-version": "v0.228.1" }, "resources": { "description": "The resource definitions for the target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", + "x-since-version": "v0.228.1" }, "run_as": { "description": "The identity to use to run the bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", - "markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)." + "markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", + "x-since-version": "v0.228.1" }, "sync": { "description": "The local paths to sync to the target workspace when a bundle is run or deployed.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", + "x-since-version": "v0.228.1" }, "variables": { "description": "The custom variable definitions for the target.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable", + "x-since-version": "v0.228.1" }, "workspace": { "description": "The Databricks workspace for the target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2654,67 +2968,83 @@ "properties": { "artifact_path": { "description": "The artifact path to use within the workspace for both deployments and workflow runs", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "auth_type": { "description": "The authentication type.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "azure_client_id": { "description": "The Azure client ID", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "azure_environment": { "description": "The Azure environment", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "azure_login_app_id": { "description": "The Azure login app ID", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "azure_tenant_id": { "description": "The Azure tenant ID", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "azure_use_msi": { "description": "Whether to use MSI for Azure", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "azure_workspace_resource_id": { "description": "The Azure workspace resource ID", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": 
"v0.228.1" }, "client_id": { "description": "The client ID for the workspace", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "file_path": { "description": "The file path to use within the workspace for both deployments and workflow runs", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "google_service_account": { "description": "The Google service account name", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "host": { "description": "The Databricks workspace host URL", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "profile": { "description": "The Databricks workspace profile name", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "resource_path": { "description": "The workspace resource path", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" }, "root_path": { "description": "The Databricks workspace root path", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "state_path": { "description": "The workspace state path", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -2740,13 +3070,16 @@ "type": "object", "properties": { "deployment_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "mode": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode", + "x-since-version": "v0.239.0" }, "source_code_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" } }, "additionalProperties": false @@ -2763,7 +3096,8 @@ "type": "object", "properties": { "source_code_path": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" } }, "additionalProperties": false @@ -2824,33 +3158,42 @@ "type": "object", "properties": { "database": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase", + "x-since-version": "v0.260.0" }, "description": { "description": "Description of the App Resource.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "genie_space": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace", + "x-since-version": "v0.273.0" }, "job": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob", + "x-since-version": "v0.239.0" }, "name": { "description": "Name of the App Resource.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "secret": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret", + "x-since-version": "v0.239.0" }, "serving_endpoint": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint" + "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint", + "x-since-version": "v0.239.0" }, "sql_warehouse": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse", + "x-since-version": "v0.239.0" }, "uc_securable": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable", + "x-since-version": "v0.253.0" } }, "additionalProperties": false, @@ -2870,13 +3213,16 @@ "type": "object", "properties": { "database_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "instance_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission", + "x-since-version": "v0.260.0" } }, "additionalProperties": false, @@ -2912,13 +3258,16 @@ "type": "object", "properties": { "name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpaceGenieSpacePermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpaceGenieSpacePermission", + "x-since-version": "v0.273.0" }, "space_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" } }, "additionalProperties": false, @@ -2957,10 +3306,12 @@ "type": "object", "properties": { "id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission", + "x-since-version": "v0.239.0" } }, "additionalProperties": false, @@ -2998,13 +3349,16 @@ "type": "object", "properties": { "key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission", + "x-since-version": "v0.239.0" }, "scope": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" } }, "additionalProperties": false, @@ -3043,10 +3397,12 @@ "type": "object", "properties": { "name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission", + "x-since-version": "v0.239.0" } }, "additionalProperties": false, @@ -3083,10 +3439,12 @@ "type": "object", "properties": { "id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.239.0" }, "permission": { - "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission", + "x-since-version": "v0.239.0" } }, "additionalProperties": false, @@ -3123,13 +3481,16 @@ "type": "object", "properties": { "permission": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurablePermission" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurablePermission", + "x-since-version": "v0.253.0" }, "securable_full_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.253.0" }, "securable_type": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurableType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurableType", + "x-since-version": "v0.253.0" } }, "additionalProperties": false, @@ -3257,15 +3618,18 @@ "properties": { "pause_status": { "description": "Read only field that indicates whether a schedule is paused or not.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus", + "x-since-version": "v0.228.1" }, "quartz_cron_expression": { "description": "The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "timezone_id": { "description": "The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3305,7 +3669,8 @@ "properties": { "enabled": { "description": "Whether to enable data classification.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3323,7 +3688,8 @@ "properties": { "email_addresses": { "description": "The list of email addresses to send the notification to. A maximum of 5 email addresses is supported.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3341,31 +3707,38 @@ "properties": { "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. 
Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "label_col": { "description": "Column for the label.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "model_id_col": { "description": "Column for the model identifier.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "prediction_col": { "description": "Column for the prediction.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "prediction_proba_col": { "description": "Column for prediction probabilities", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "problem_type": { "description": "Problem type the model aims to solve.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType", + "x-since-version": "v0.228.1" }, "timestamp_col": { "description": "Column for the timestamp.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3406,23 +3779,28 @@ "properties": { "definition": { "description": "Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "input_columns": { "description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "name": { "description": "Name of the metric in the output tables.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "output_data_type": { "description": "The output type of the custom metric.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "type": { "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3464,13 +3842,15 @@ "properties": { "on_failure": { "description": "Destinations to send notifications on failure/timeout.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination" + "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", + "x-since-version": "v0.228.1" }, "on_new_classification_tag_detected": { "description": "Destinations to send notifications on new classification tag detected.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3502,11 +3882,13 @@ "properties": { "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "timestamp_col": { "description": "Column for the timestamp.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3528,23 +3910,29 @@ "properties": { "alias_name": { "description": "Name of the alias, e.g. 'champion' or 'latest_stable'", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "catalog_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "model_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "schema_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "version_num": { "description": "Integer version number of the model version to which this alias points.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.273.0" } }, "additionalProperties": false @@ -3578,7 +3966,8 @@ "properties": { "destination": { "description": "abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3599,11 +3988,13 @@ "properties": { "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded.\nNote that `max_workers` must be strictly greater than `min_workers`.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "min_workers": { "description": "The minimum number of workers to which the cluster can scale down when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3621,42 +4012,52 @@ "description": "Attributes set during cluster creation which are related to Amazon Web Services.", "properties": { "availability": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability", + "x-since-version": "v0.228.1" }, "ebs_volume_count": { "description": "The number of volumes launched for each instance. Users can choose up to 10 volumes.\nThis feature is only enabled for supported node types. 
Legacy node types cannot specify\ncustom EBS volumes.\nFor node types with no instance store, at least one EBS volume needs to be specified;\notherwise, cluster creation will fail.\n\nThese EBS volumes will be mounted at `/ebs0`, `/ebs1`, and etc.\nInstance store volumes will be mounted at `/local_disk0`, `/local_disk1`, and etc.\n\nIf EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for\nscratch storage because heterogeneously sized scratch devices can lead to inefficient disk\nutilization. If no EBS volumes are attached, Databricks will configure Spark to use instance\nstore volumes.\n\nPlease note that if EBS volumes are specified, then the Spark configuration `spark.local.dir`\nwill be overridden.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "ebs_volume_iops": { "description": "If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "ebs_volume_size": { "description": "The size of each EBS volume (in GiB) launched for each instance. For general purpose\nSSD, this value must be within the range 100 - 4096. For throughput optimized HDD,\nthis value must be within the range 500 - 4096.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "ebs_volume_throughput": { "description": "If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "ebs_volume_type": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType", + "x-since-version": "v0.228.1" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nIf this value is greater than 0, the cluster driver node in particular will be placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "instance_profile_arn": { "description": "Nodes for this cluster will only be placed on AWS instances with this instance profile. If\nomitted, nodes will be placed on instances without an IAM instance profile. The instance\nprofile must have previously been added to the Databricks environment by an account\nadministrator.\n\nThis feature may only be available to certain customer plans.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "spot_bid_price_percent": { "description": "The bid price for AWS spot instances, as a percentage of the corresponding instance type's\non-demand price.\nFor example, if this field is set to 50, and the cluster needs a new `r3.xlarge` spot\ninstance, then the bid price is half of the price of\non-demand `r3.xlarge` instances. 
Similarly, if this field is set to 200, the bid price is twice\nthe price of on-demand `r3.xlarge` instances. If not specified, the default value is 100.\nWhen spot instances are requested for this cluster, only spot instances whose bid price\npercentage matches this field will be considered.\nNote that, for safety, we enforce this field to be no more than 10000.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "zone_id": { "description": "Identifier for the availability zone/datacenter in which the cluster resides.\nThis string will be of a form like \"us-west-2a\". The provided availability\nzone must be in the same region as the Databricks deployment. For example, \"us-west-2a\"\nis not a valid zone id if the Databricks deployment resides in the \"us-east-1\" region.\nThis is an optional field at cluster creation, and if not specified, the zone \"auto\" will be used.\nIf the zone specified is \"auto\", will try to place cluster in a zone with high availability,\nand will retry placement in a different AZ if there is not enough capacity.\n\nThe list of available zones as well as the default value can be found by using the\n`List Zones` method.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3691,19 +4092,23 @@ "description": "Attributes set during cluster creation which are related to Microsoft Azure.", "properties": { "availability": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability", + "x-since-version": "v0.228.1" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "log_analytics_info": { "description": "Defines values necessary to configure and run Azure Log Analytics agent", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo", + "x-since-version": "v0.228.1" }, "spot_bid_max_price": { "description": "The max bid price to be used for Azure spot instances.\nThe Max price for the bid cannot be higher than the on-demand price of the instance.\nIf not specified, the default value is -1, which specifies that the instance cannot be evicted\non the basis of price, and only on the basis of availability. 
Further, the value should \u003e 0 or -1.", - "$ref": "#/$defs/float64" + "$ref": "#/$defs/float64", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3738,11 +4143,13 @@ "properties": { "jobs": { "description": "With jobs set, the cluster can be used for jobs", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "notebooks": { "description": "With notebooks set, this cluster can be used for notebooks", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3761,15 +4168,18 @@ "properties": { "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", + "x-since-version": "v0.228.1" }, "s3": { "description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", + "x-since-version": "v0.228.1" }, "volumes": { "description": "destination needs to be provided, e.g.\n`{ \"volumes\": { \"destination\": \"/Volumes/catalog/schema/volume/cluster_log\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", + "x-since-version": "v0.242.0" } }, "additionalProperties": false @@ -3788,126 +4198,158 @@ "properties": { "apply_policy_default_values": { "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale", + "x-since-version": "v0.228.1" }, "autotermination_minutes": { "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. 
If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", + "x-since-version": "v0.228.1" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", + "x-since-version": "v0.228.1" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", + "x-since-version": "v0.228.1" }, "cluster_name": { "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. 
Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "data_security_mode": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode", + "x-since-version": "v0.228.1" }, "docker_image": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage", + "x-since-version": "v0.228.1" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf driver_node_type_id, node_type_id, and virtual_cluster_size are all specified, driver_node_type_id and node_type_id take precedence.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "enable_elastic_disk": { "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "enable_local_disk_encryption": { "description": "Whether to enable LUKS on cluster VMs' local disks", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", + "x-since-version": "v0.228.1" }, "init_scripts": { "description": "The configuration for storing init scripts. 
Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", + "x-since-version": "v0.228.1" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "is_single_node": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.237.0" }, "kind": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind", + "x-since-version": "v0.237.0" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "remote_disk_throughput": { "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. 
Currently only supported for GCP HYPERDISK_BALANCED disks.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.257.0" }, "runtime_engine": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine", + "x-since-version": "v0.228.1" }, "single_user_name": { "description": "Single user name if data_security_mode is `SINGLE_USER`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "spark_version": { "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "total_initial_remote_disk_size": { "description": "If set, what the total initial volume size (in GB) of the remote disks should be. Currently only supported for GCP HYPERDISK_BALANCED disks.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.257.0" }, "use_ml_runtime": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.237.0" }, "workload_type": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3950,7 +4392,8 @@ "properties": { "destination": { "description": "dbfs destination, e.g. 
`dbfs:/my/path`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -3971,11 +4414,13 @@ "properties": { "password": { "description": "Password of the user", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "username": { "description": "Name of the user", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -3992,11 +4437,13 @@ "type": "object", "properties": { "basic_auth": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth", + "x-since-version": "v0.228.1" }, "url": { "description": "URL of the docker image.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -4033,18 +4480,22 @@ "description": "Use `environment_version` instead.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "environment_version": { "description": "Required. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" }, "java_dependencies": { - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.271.0" } }, "additionalProperties": false @@ -4062,33 +4513,40 @@ "description": "Attributes set during cluster creation which are related to GCP.", "properties": { "availability": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability", + "x-since-version": "v0.228.1" }, "boot_disk_size": { "description": "Boot disk size in GB", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.265.0" }, "google_service_account": { "description": "If provided, the cluster will impersonate the google service account when accessing\ngcloud services (like GCS). 
The google service account\nmust have previously been added to the Databricks environment by an account\nadministrator.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "local_ssd_count": { "description": "If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached.\nEach local SSD is 375GB in size.\nRefer to [GCP documentation](https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds)\nfor the supported number of local SSDs for each instance type.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "use_preemptible_executors": { "description": "This field determines whether the spark executors will be scheduled to run on preemptible\nVMs (when set to true) versus standard compute engine VMs (when set to false; default).\nNote: Soon to be deprecated, use the 'availability' field instead.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "zone_id": { "description": "Identifier for the availability zone in which the cluster resides.\nThis can be one of the following:\n- \"HA\" =\u003e High availability, spread nodes across availability zones for a Databricks deployment region [default].\n- \"AUTO\" =\u003e Databricks picks an availability zone to schedule the cluster on.\n- A GCP availability zone =\u003e Pick One of the available zones for (machine type + region) from\nhttps://cloud.google.com/compute/docs/regions-zones.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -4124,7 +4582,8 @@ "properties": { "destination": { "description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4146,33 +4605,40 @@ "properties": { "abfss": { "description": "Contains the Azure Data Lake Storage destination path", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info", + "x-since-version": "v0.228.1" }, "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\": { \"destination\" : \"dbfs:/home/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "file": { "description": "destination needs to be provided, e.g.\n`{ \"file\": { \"destination\": \"file:/my/local/file.sh\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo", + "x-since-version": "v0.228.1" }, "gcs": { "description": "destination needs to be provided, e.g.\n`{ \"gcs\": { \"destination\": \"gs://my-bucket/file.sh\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo", + "x-since-version": "v0.228.1" }, "s3": { "description": "destination and either the region or endpoint need to be provided. 
e.g.\n`{ \\\"s3\\\": { \\\"destination\\\": \\\"s3://cluster_log_bucket/prefix\\\", \\\"region\\\": \\\"us-west-2\\\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", + "x-since-version": "v0.228.1" }, "volumes": { "description": "destination needs to be provided. e.g.\n`{ \\\"volumes\\\" : { \\\"destination\\\" : \\\"/Volumes/my-init.sh\\\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", + "x-since-version": "v0.228.1" }, "workspace": { "description": "destination needs to be provided, e.g.\n`{ \"workspace\": { \"destination\": \"/cluster-init-scripts/setup-datadog.sh\" } }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -4204,33 +4670,40 @@ "properties": { "cran": { "description": "Specification of a CRAN library to be installed as part of the library", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary", + "x-since-version": "v0.228.1" }, "egg": { "description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "jar": { "description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "maven": { "description": "Specification of a maven library to be installed. For example:\n`{ \"coordinates\": \"org.jsoup:jsoup:1.7.2\" }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", + "x-since-version": "v0.228.1" }, "pypi": { "description": "Specification of a PyPi library to be installed. For example:\n`{ \"package\": \"simplejson\" }`", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary", + "x-since-version": "v0.228.1" }, "requirements": { "description": "URI of the requirements.txt file to install. 
Only Workspace paths and Unity Catalog Volumes paths are supported.\nFor example: `{ \"requirements\": \"/Workspace/path/to/requirements.txt\" }` or `{ \"requirements\" : \"/Volumes/path/to/requirements.txt\" }`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "whl": { "description": "URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"whl\": \"/Workspace/path/to/library.whl\" }`, `{ \"whl\" : \"/Volumes/path/to/library.whl\" }` or\n`{ \"whl\": \"s3://my-bucket/library.whl\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -4248,7 +4721,8 @@ "properties": { "destination": { "description": "local file destination, e.g. `file:/my/local/file.sh`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4269,11 +4743,13 @@ "properties": { "log_analytics_primary_key": { "description": "The primary key for the Azure Log Analytics agent configuration", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "log_analytics_workspace_id": { "description": "The workspace ID for the Azure Log Analytics agent configuration", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -4291,15 +4767,18 @@ "properties": { "coordinates": { "description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "exclusions": { "description": "List of dependencies to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`.\n\nMaven dependency exclusions:\nhttps://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "repo": { "description": "Maven repo to install the Maven package from. If omitted, both Maven Central Repository\nand Spark Packages are searched.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4320,11 +4799,13 @@ "properties": { "package": { "description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "repo": { "description": "The repository where the package can be found. If not specified, the default pip index is\nused.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4345,11 +4826,13 @@ "properties": { "package": { "description": "The name of the CRAN package to install.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "repo": { "description": "The repository where the package can be found. 
If not specified, the default CRAN repo is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4387,31 +4870,38 @@ "properties": { "canned_acl": { "description": "(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`.\nIf `canned_acl` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on\nthe destination bucket and prefix. The full list of possible canned acl can be found at\nhttp://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl.\nPlease also note that by default only the object owner gets full controls. If you are using cross account\nrole for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to\nread the logs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "destination": { "description": "S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "enable_encryption": { "description": "(Optional) Flag to enable server side encryption, `false` by default.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "encryption_type": { "description": "(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when\nencryption is enabled and the default type is `sse-s3`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "endpoint": { "description": "S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set.\nIf both are set, endpoint will be used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "kms_key": { "description": "(Optional) Kms key which will be used if encryption is enabled and encryption type is set to `sse-kms`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "region": { "description": "S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set,\nendpoint will be used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4433,7 +4923,8 @@ "properties": { "destination": { "description": "UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4455,7 +4946,8 @@ "properties": { "clients": { "description": "Defines what type of clients can use the cluster. E.g. Notebooks, Jobs", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4477,7 +4969,8 @@ "properties": { "destination": { "description": "wsfs destination, e.g. 
`workspace:/cluster-init-scripts/setup-datadog.sh`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4513,11 +5006,13 @@ "properties": { "key": { "description": "The key of the custom tag.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" }, "value": { "description": "The value of the custom tag.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.273.0" } }, "additionalProperties": false @@ -4536,15 +5031,18 @@ "properties": { "branch_time": { "description": "Branch time of the ref database instance.\nFor a parent ref instance, this is the point in time on the parent instance from which the\ninstance was created.\nFor a child ref instance, this is the point in time on the instance from which the child\ninstance was created.\nInput: For specifying the point in time to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" }, "lsn": { "description": "User-specified WAL LSN of the ref database instance.\n\nInput: For specifying the WAL LSN to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" }, "name": { "description": "Name of the ref database instance.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.265.0" } }, "additionalProperties": false @@ -4594,15 +5092,18 @@ "properties": { "budget_policy_id": { "description": "Budget policy to set on the newly created pipeline.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "storage_catalog": { "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC catalog for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be a standard catalog where the user has permissions to create Delta tables.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "storage_schema": { "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC schema for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be in the standard catalog where the user has permissions to create Delta tables.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" } }, "additionalProperties": false @@ -4736,31 +5237,38 @@ "properties": { "create_database_objects_if_missing": { "description": "If true, the synced table's logical database and schema resources in PG\nwill be created if they do not already exist.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.266.0" }, "existing_pipeline_id": { "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf existing_pipeline_id is defined, the synced table will be bin packed into the existing pipeline\nreferenced. 
This avoids creating a new pipeline and allows sharing existing compute.\nIn this case, the scheduling_policy of this synced table must match the scheduling policy of the existing pipeline.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "new_pipeline_spec": { "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf new_pipeline_spec is defined, a new pipeline is created for this synced table. The location pointed to is used\nto store intermediate files (checkpoints, event logs etc). The caller must have write permissions to create Delta\ntables in the specified catalog and schema. Again, note this requires write permissions, whereas the source table\nonly requires read permissions.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec", + "x-since-version": "v0.266.0" }, "primary_key_columns": { "description": "Primary Key columns to be used for data insert/update in the destination.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.266.0" }, "scheduling_policy": { "description": "Scheduling policy of the underlying pipeline.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSchedulingPolicy" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSchedulingPolicy", + "x-since-version": "v0.266.0" }, "source_table_full_name": { "description": "Three-part (catalog, schema, table) name of the source Delta table.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" }, "timeseries_key": { "description": "Time series key to deduplicate (tie-break) rows with the same primary key.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.266.0" } }, "additionalProperties": false @@ -4804,19 +5312,23 @@ "properties": { "continuous_update_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE\nor the SYNCED_UPDATING_PIPELINE_RESOURCES state.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableContinuousUpdateStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableContinuousUpdateStatus", + "x-since-version": "v0.266.0" }, "failed_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the\nSYNCED_PIPELINE_FAILED state.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableFailedStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableFailedStatus", + "x-since-version": "v0.266.0" }, "provisioning_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the\nPROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableProvisioningStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableProvisioningStatus", + "x-since-version": "v0.266.0" }, "triggered_update_status": { "description": "Detailed status of a synced table. 
Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE\nor the SYNCED_NO_PENDING_UPDATE state.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableTriggeredUpdateStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableTriggeredUpdateStatus", + "x-since-version": "v0.266.0" } }, "additionalProperties": false @@ -4863,19 +5375,23 @@ "properties": { "clean_room_name": { "description": "The clean room that the notebook belongs to.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.237.0" }, "etag": { "description": "Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the latest version).\nIt can be fetched by calling the :method:cleanroomassets/get API.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.237.0" }, "notebook_base_parameters": { "description": "Base parameters to be used for the clean room notebook job.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.237.0" }, "notebook_name": { "description": "Name of the notebook being run.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.237.0" } }, "additionalProperties": false, @@ -4897,15 +5413,18 @@ "properties": { "gpu_node_pool_id": { "description": "ID of the GPU pool to use.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "gpu_type": { "description": "GPU type.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "num_gpus": { "description": "Number of GPUs.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.243.0" } }, "additionalProperties": false, @@ -4941,15 +5460,18 @@ "properties": { "left": { "description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "op": { "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp", + "x-since-version": "v0.228.1" }, "right": { "description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -4992,11 +5514,13 @@ "properties": { "pause_status": { "description": "Indicate whether the continuous execution of the job is paused or not. 
Defaults to UNPAUSED.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", + "x-since-version": "v0.228.1" }, "task_retry_mode": { "description": "Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskRetryMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskRetryMode", + "x-since-version": "v0.267.0" } }, "additionalProperties": false @@ -5014,15 +5538,18 @@ "properties": { "pause_status": { "description": "Indicate whether this schedule is paused or not.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", + "x-since-version": "v0.228.1" }, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details. This field is required.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "timezone_id": { "description": "A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5044,14 +5571,17 @@ "description": "Configures the Lakeview Dashboard job task type.", "properties": { "dashboard_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "subscription": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription", + "x-since-version": "v0.248.0" }, "warehouse_id": { "description": "Optional: The warehouse id to execute the dashboard with for the schedule.\nIf not specified, the default warehouse of the dashboard will be used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ -5070,11 +5600,13 @@ "properties": { "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt Cloud for this task", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.256.0" }, "dbt_cloud_job_id": { "description": "Id of the dbt Cloud job to be triggered", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.256.0" } }, "additionalProperties": false @@ -5092,11 +5624,13 @@ "properties": { "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt platform for this task", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.257.0" }, "dbt_platform_job_id": { "description": "Id of the dbt platform job to be triggered. Specified as a string for maximum compatibility with clients.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.257.0" } }, "additionalProperties": false @@ -5114,31 +5648,38 @@ "properties": { "catalog": { "description": "Optional name of the catalog to use. 
The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks \u003e= 1.1.1.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "commands": { "description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "profiles_directory": { "description": "Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "project_directory": { "description": "Path to the project directory. Optional for Git sourced tasks, in which\ncase if no value is provided, the root of the Git repository is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "schema": { "description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source": { "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in Databricks workspace.\n* `GIT`: Project is located in cloud Git provider.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", + "x-since-version": "v0.228.1" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5159,15 +5700,18 @@ "properties": { "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "url": { "description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.\nThis makes it possible to wait for a batch of incoming files to arrive before triggering a run. 
The\nminimum allowed value is 60 seconds.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5188,15 +5732,18 @@ "properties": { "concurrency": { "description": "An optional maximum allowed number of concurrent runs of the task.\nSet this value if you want to be able to execute multiple runs of the task concurrently.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "inputs": { "description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "task": { "description": "Configuration for the task that will be run for each element in the array", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5233,34 +5780,42 @@ "properties": { "command": { "description": "Command launcher to run the actual script, e.g. bash, python etc.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "compute": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig", + "x-since-version": "v0.243.0" }, "dl_runtime_image": { "description": "Runtime image", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "mlflow_experiment_name": { "description": "Optional string containing the name of the MLflow experiment to log the run to. If name is not\nfound, backend will create the mlflow experiment using the name.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "source": { "description": "Optional location type of the training script. When set to `WORKSPACE`, the script will be retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Script is located in Databricks workspace.\n* `GIT`: Script is located in cloud Git provider.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", + "x-since-version": "v0.243.0" }, "training_script_path": { "description": "The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. 
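The `ForEachTask` above nests a full task under `task`; a sketch with hypothetical task keys and notebook path, where `inputs` is a JSON string (or a reference to an array parameter):

    tasks:
      - task_key: process_all
        for_each_task:
          inputs: '["a", "b", "c"]'   # JSON array passed as a string
          concurrency: 2
          task:
            task_key: process_all_iteration          # hypothetical nested key
            notebook_task:
              notebook_path: /Workspace/process_one   # hypothetical path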
This field is required.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "yaml_parameters": { "description": "Optional string containing model parameters passed to the training script in yaml format.\nIf present, then the content in yaml_parameters_file_path will be ignored.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" }, "yaml_parameters_file_path": { "description": "Optional path to a YAML file containing model parameters passed to the training script.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" } }, "additionalProperties": false, @@ -5303,7 +5858,8 @@ "properties": { "used_commit": { "description": "Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -5322,23 +5878,28 @@ "properties": { "git_branch": { "description": "Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "git_commit": { "description": "Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "git_provider": { "description": "Unique identifier of the service used to host the Git repository. The value is case insensitive.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider", + "x-since-version": "v0.228.1" }, "git_tag": { "description": "Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "git_url": { "description": "URL of the repository to be cloned by this job.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5360,11 +5921,13 @@ "properties": { "job_cluster_key": { "description": "A unique name for the job cluster. 
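The `GitSource` fields above combine as below; the URL is hypothetical, and per the descriptions `git_branch`, `git_tag`, and `git_commit` are mutually exclusive:

    resources:
      jobs:
        my_job:                                    # hypothetical resource key
          git_source:
            git_url: https://github.com/org/repo   # hypothetical repository
            git_provider: gitHub                   # value is case insensitive
            git_branch: main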
This field is required and must be unique within the job.\n`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "new_cluster": { "description": "If new_cluster, a description of a cluster that is created for each task.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5386,11 +5949,13 @@ "properties": { "kind": { "description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind", + "x-since-version": "v0.228.1" }, "metadata_file_path": { "description": "Path of the file that contains deployment metadata.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5444,27 +6009,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. 
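A sketch of the `JobCluster` shape above; the `new_cluster` body comes from `compute.ClusterSpec`, which is defined elsewhere in this schema, so the concrete cluster fields and values here are assumptions:

    resources:
      jobs:
        my_job:
          job_clusters:
            - job_cluster_key: default_cluster      # referenced by tasks below
              new_cluster:
                spark_version: 15.4.x-scala2.12     # assumed ClusterSpec fields
                node_type_id: i3.xlarge
                num_workers: 2
          tasks:
            - task_key: main
              job_cluster_key: default_cluster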
A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
-          "$ref": "#/$defs/slice/string"
+          "$ref": "#/$defs/slice/string",
+          "x-since-version": "v0.228.1"
        }
      },
      "additionalProperties": false
@@ -5482,10 +6053,12 @@
      "properties": {
        "environment_key": {
          "description": "The key of an environment. It has to be unique within a job.",
-          "$ref": "#/$defs/string"
+          "$ref": "#/$defs/string",
+          "x-since-version": "v0.228.1"
        },
        "spec": {
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment",
+          "x-since-version": "v0.228.1"
        }
      },
      "additionalProperties": false,
@@ -5506,11 +6079,13 @@
      "properties": {
        "no_alert_for_canceled_runs": {
          "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.",
-          "$ref": "#/$defs/bool"
+          "$ref": "#/$defs/bool",
+          "x-since-version": "v0.228.1"
        },
        "no_alert_for_skipped_runs": {
          "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.",
-          "$ref": "#/$defs/bool"
+          "$ref": "#/$defs/bool",
+          "x-since-version": "v0.228.1"
        }
      },
      "additionalProperties": false
@@ -5528,11 +6103,13 @@
      "properties": {
        "default": {
          "description": "Default value of the parameter.",
-          "$ref": "#/$defs/string"
+          "$ref": "#/$defs/string",
+          "x-since-version": "v0.228.1"
        },
        "name": {
          "description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`",
-          "$ref": "#/$defs/string"
+          "$ref": "#/$defs/string",
+          "x-since-version": "v0.228.1"
        }
      },
      "additionalProperties": false,
@@ -5578,15 +6155,18 @@
      "properties": {
        "dirty_state": {
          "description": "Dirty state indicates the job is not fully synced with the job specification in the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. 
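Per the `JobParameterDefinition` above, job-level parameters are a list of `name`/`default` pairs; a minimal sketch with hypothetical values:

    resources:
      jobs:
        my_job:
          parameters:
            - name: env          # alphanumerics, `_`, `-`, and `.` only
              default: dev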
Import the remote job specification again from UI to make the job fully synced.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState", + "x-since-version": "v0.228.1" }, "import_from_git_branch": { "description": "Name of the branch which the job is imported from.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "job_config_path": { "description": "Path of the job YAML file that contains the job specification.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5657,14 +6237,17 @@ "type": "object", "properties": { "metric": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric", + "x-since-version": "v0.228.1" }, "op": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator", + "x-since-version": "v0.228.1" }, "value": { "description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5687,7 +6270,8 @@ "description": "An optional set of health rules that can be defined for this job.", "properties": { "rules": { - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -5705,23 +6289,28 @@ "properties": { "aliases": { "description": "Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.279.0" }, "condition": { "description": "The condition based on which to trigger a job run.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition", + "x-since-version": "v0.279.0" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.279.0" }, "securable_name": { "description": "Name of the securable to monitor (\"mycatalog.myschema.mymodel\" in the case of model-level triggers,\n\"mycatalog.myschema\" in the case of schema-level triggers) or empty in the case of metastore-level triggers.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no model updates have occurred for the specified time\nand can be used to wait for a series of model updates before triggering a run. 
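The `JobsHealthRule(s)` definitions above compose as below; `RUN_DURATION_SECONDS` is named in the notification descriptions earlier, while `GREATER_THAN` is an assumption about the `JobsHealthOperator` enum, which is not shown in this hunk:

    health:
      rules:
        - metric: RUN_DURATION_SECONDS
          op: GREATER_THAN       # assumed operator enum value
          value: 3600            # threshold the metric must satisfy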
The\nminimum allowed value is 60 seconds.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -5758,19 +6347,23 @@ "properties": { "base_parameters": { "description": "Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run\nNow with parameters specified, the two parameters maps are merged. If the same key is specified in\n`base_parameters` and in `run-now`, the value from `run-now` is used.\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nIf the notebook takes a parameter that is not specified in the job’s `base_parameters` or the `run-now` override parameters,\nthe default value from the notebook is used.\n\nRetrieve these parameters in a notebook using [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets).\n\nThe JSON representation of this field cannot exceed 1MB.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "notebook_path": { "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source": { "description": "Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Notebook is located in Databricks workspace.\n* `GIT`: Notebook is located in cloud Git provider.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", + "x-since-version": "v0.228.1" }, "warehouse_id": { "description": "Optional `warehouse_id` to run the notebook on a SQL warehouse. 
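A sketch of the `NotebookTask` above; the path and parameter are hypothetical, and `base_parameters` are read in the notebook via `dbutils.widgets.get` as described:

    tasks:
      - task_key: ingest
        notebook_task:
          notebook_path: /Workspace/ingest   # absolute path for WORKSPACE source
          source: WORKSPACE
          base_parameters:
            run_date: "2024-01-01"           # hypothetical value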
Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.\n\nNote that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5822,11 +6415,13 @@ "properties": { "interval": { "description": "The interval at which the trigger should run.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "unit": { "description": "The unit of time for the interval.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5864,7 +6459,8 @@ "properties": { "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -5882,11 +6478,13 @@ "properties": { "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "pipeline_id": { "description": "The full name of the pipeline task to execute.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -5907,23 +6505,28 @@ "properties": { "authentication_method": { "description": "How the published Power BI model authenticates to Databricks", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AuthenticationMethod" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AuthenticationMethod", + "x-since-version": "v0.248.0" }, "model_name": { "description": "The name of the Power BI model", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "overwrite_existing": { "description": "Whether to overwrite existing Power BI models", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.248.0" }, "storage_mode": { "description": "The default storage mode of the Power BI model", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", + "x-since-version": "v0.248.0" }, "workspace_name": { "description": "The name of the Power BI workspace of the model", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ -5941,19 +6544,23 @@ "properties": { "catalog": { "description": "The catalog name in Databricks", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "name": { "description": "The table name in Databricks", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "schema": { "description": "The schema name in Databricks", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "storage_mode": { "description": "The Power BI storage mode of the table", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ 
-5971,23 +6578,28 @@ "properties": { "connection_resource_name": { "description": "The resource name of the UC connection to authenticate from Databricks to Power BI", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "power_bi_model": { "description": "The semantic model to update", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel", + "x-since-version": "v0.248.0" }, "refresh_after_update": { "description": "Whether the model should be refreshed after the update", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.248.0" }, "tables": { "description": "The tables to be exported to Power BI", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable", + "x-since-version": "v0.248.0" }, "warehouse_id": { "description": "The SQL warehouse ID to use as the Power BI data source", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ -6005,19 +6617,23 @@ "properties": { "entry_point": { "description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "named_parameters": { "description": "Command-line parameters passed to Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "package_name": { "description": "Name of the package to execute", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "parameters": { "description": "Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6039,7 +6655,8 @@ "properties": { "enabled": { "description": "If true, enable queueing for the job. 
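The `PythonWheelTask` above translates to a bundle task like this sketch; the package name is hypothetical, and per the descriptions `parameters` and `named_parameters` are mutually exclusive:

    tasks:
      - task_key: etl
        python_wheel_task:
          package_name: my_package       # hypothetical wheel
          entry_point: main
          parameters: ["--env", "dev"]   # leave empty if named_parameters is used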
This is a required field.",
-          "$ref": "#/$defs/bool"
+          "$ref": "#/$defs/bool",
+          "x-since-version": "v0.228.1"
        }
      },
      "additionalProperties": false,
@@ -6084,6 +6701,7 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "jar_params": {
@@ -6092,15 +6710,18 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "job_id": {
          "description": "ID of the job to trigger.",
-          "$ref": "#/$defs/int64"
+          "$ref": "#/$defs/int64",
+          "x-since-version": "v0.228.1"
        },
        "job_parameters": {
          "description": "Job-level parameters used to trigger the job.",
-          "$ref": "#/$defs/map/string"
+          "$ref": "#/$defs/map/string",
+          "x-since-version": "v0.228.1"
        },
        "notebook_params": {
          "description": "A map from keys to values for jobs with notebook task, for example `\"notebook_params\": {\"name\": \"john doe\", \"age\": \"35\"}`.\nThe map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function.\n\nIf not specified upon `run-now`, the triggered run uses the job’s base parameters.\n\nnotebook_params cannot be specified in conjunction with jar_params.\n\n⚠ **Deprecation note** Use [job parameters](https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown) to pass information down to tasks.\n\nThe JSON representation of this field (for example `{\"notebook_params\":{\"name\":\"john doe\",\"age\":\"35\"}}`) cannot exceed 10,000 bytes.",
@@ -6108,17 +6729,20 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "pipeline_params": {
          "description": "Controls whether the pipeline should perform a full refresh",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams",
+          "x-since-version": "v0.228.1"
        },
        "python_named_params": {
          "$ref": "#/$defs/map/string",
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "python_params": {
@@ -6127,6 +6751,7 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "spark_submit_params": {
@@ -6135,6 +6760,7 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "sql_params": {
@@ -6143,6 +6769,7 @@
          "x-databricks-preview": "PRIVATE",
          "deprecationMessage": "This field is deprecated",
          "doNotSuggest": true,
+          "x-since-version": "v0.228.1",
          "deprecated": true
        }
      },
@@ -6182,20 +6809,24 @@
          "description": "Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field instead. For serverless compute, provide a `jar` through the `java_dependencies` field inside the `environments` list.\n\nSee the examples of classic and serverless compute usage at the top of the page.",
          "$ref": "#/$defs/string",
          "deprecationMessage": "This field is deprecated",
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "main_class_name": {
          "description": "The full name of the class containing the main method to be executed. 
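Of the `RunJobTask` fields above, only `job_id` and `job_parameters` are not deprecated; a sketch, where the `${resources.jobs.<key>.id}` interpolation is an assumption about bundle reference syntax:

    tasks:
      - task_key: trigger_downstream
        run_job_task:
          job_id: ${resources.jobs.downstream.id}   # assumed reference syntax
          job_parameters:
            env: dev                                # hypothetical parameter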
This class must be contained in a JAR provided as a library.\n\nThe code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "parameters": { "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "run_as_repl": { "description": "Deprecated. A value of `false` is no longer supported.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.240.0", "deprecated": true } }, @@ -6214,15 +6845,18 @@ "properties": { "parameters": { "description": "Command line parameters passed to the Python file.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "python_file": { "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source": { "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local\nDatabricks workspace or cloud location (if the `python_file` has a URI format). 
When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6243,7 +6877,8 @@ "properties": { "parameters": { "description": "Command-line parameters passed to spark submit.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6261,27 +6896,33 @@ "properties": { "alert": { "description": "If alert, indicates that this job must refresh a SQL alert.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert", + "x-since-version": "v0.228.1" }, "dashboard": { "description": "If dashboard, indicates that this job must refresh a SQL dashboard.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard", + "x-since-version": "v0.228.1" }, "file": { "description": "If file, indicates that this job runs a SQL file in a remote Git repository.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile", + "x-since-version": "v0.228.1" }, "parameters": { "description": "Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "query": { "description": "If query, indicates that this job must execute a SQL query.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery", + "x-since-version": "v0.228.1" }, "warehouse_id": { "description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. 
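A sketch of the `SparkPythonTask` above; the file path is hypothetical and kept relative because the source is `GIT`, as the description requires:

    tasks:
      - task_key: script
        spark_python_task:
          python_file: src/main.py     # relative path for GIT source
          source: GIT
          parameters: ["--verbose"]    # hypothetical flag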
Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6302,15 +6943,18 @@ "properties": { "alert_id": { "description": "The canonical identifier of the SQL alert.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "pause_subscriptions": { "description": "If true, the alert notifications are not sent to subscribers.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "subscriptions": { "description": "If specified, alert notifications are sent to subscribers.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6331,19 +6975,23 @@ "properties": { "custom_subject": { "description": "Subject of the email sent to subscribers of this task.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "dashboard_id": { "description": "The canonical identifier of the SQL dashboard.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "pause_subscriptions": { "description": "If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "subscriptions": { "description": "If specified, dashboard snapshots are sent to subscriptions.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6364,11 +7012,13 @@ "properties": { "path": { "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source": { "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6389,7 +7039,8 @@ "properties": { "query_id": { "description": "The canonical identifier of the SQL query.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6410,11 +7061,13 @@ "properties": { "destination_id": { "description": "The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. 
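The `SqlTask` variants above (`alert`, `dashboard`, `file`, `query`) are alternative sub-objects; a sketch that refreshes a saved query, with hypothetical IDs:

    tasks:
      - task_key: refresh_query
        sql_task:
          warehouse_id: abcdef1234567890                    # hypothetical serverless/pro warehouse
          query:
            query_id: 12345678-aaaa-bbbb-cccc-1234567890ab  # hypothetical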
You cannot set both destination_id and user_name for subscription notifications.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "user_name": { "description": "The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. You cannot set both destination_id and user_name for subscription notifications.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6448,14 +7101,17 @@ "properties": { "custom_subject": { "description": "Optional: Allows users to specify a custom subject line on the email sent\nto subscribers.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "paused": { "description": "When true, the subscription will not send emails.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.248.0" }, "subscribers": { - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ -6472,10 +7128,12 @@ "type": "object", "properties": { "destination_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" }, "user_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.248.0" } }, "additionalProperties": false @@ -6493,19 +7151,23 @@ "properties": { "condition": { "description": "The table(s) condition based on which to trigger a job run.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition", + "x-since-version": "v0.228.1" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "table_names": { "description": "A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no table updates have occurred for the specified time\nand can be used to wait for a series of table updates before triggering a run. 
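For the `TableUpdateTriggerConfiguration` above, a sketch; the table name is hypothetical and `ALL_UPDATED` is an assumption about the `jobs.Condition` enum:

    trigger:
      pause_status: UNPAUSED
      table_update:
        table_names:
          - main.analytics.events          # catalog_name.schema_name.table_name
        condition: ALL_UPDATED             # assumed enum value
        min_time_between_triggers_seconds: 60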
The\nminimum allowed value is 60 seconds.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6526,15 +7188,18 @@ "properties": { "clean_rooms_notebook_task": { "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask", + "x-since-version": "v0.237.0" }, "condition_task": { "description": "The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present.\nThe condition task does not require a cluster to execute and does not support retries or notifications.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask", + "x-since-version": "v0.228.1" }, "dashboard_task": { "description": "The task refreshes a dashboard and sends a snapshot to subscribers.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask", + "x-since-version": "v0.248.0" }, "dbt_cloud_task": { "description": "Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task", @@ -6542,140 +7207,173 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, + "x-since-version": "v0.256.0", "deprecated": true }, "dbt_platform_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.257.0" }, "dbt_task": { "description": "The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask", + "x-since-version": "v0.228.1" }, "depends_on": { "description": "An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true.\nThe key is `task_key`, and the value is the name assigned to the dependent task.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency", + "x-since-version": "v0.228.1" }, "description": { "description": "An optional description for this task.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "disable_auto_optimization": { "description": "An option to disable auto optimization in serverless", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "disabled": { "description": "An optional flag to disable the task. 
If set to true, the task will not run even if it is part of a job.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.271.0" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications", + "x-since-version": "v0.228.1" }, "environment_key": { "description": "The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "existing_cluster_id": { "description": "If existing_cluster_id, the ID of an existing cluster that is used for all runs.\nWhen running jobs or tasks on an existing cluster, you may need to manually restart\nthe cluster if it stops responding. We suggest running jobs and tasks on new clusters for\ngreater reliability", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "for_each_task": { "description": "The task executes a nested task for every input provided when the `for_each_task` field is present.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask", + "x-since-version": "v0.228.1" }, "gen_ai_compute_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.243.0" }, "health": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules", + "x-since-version": "v0.228.1" }, "job_cluster_key": { "description": "If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "libraries": { "description": "An optional list of libraries to be installed on the cluster.\nThe default value is an empty list.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library", + "x-since-version": "v0.228.1" }, "max_retries": { "description": "An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "min_retry_interval_millis": { "description": "An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. 
The default behavior is that unsuccessful runs are immediately retried.",
-          "$ref": "#/$defs/int"
+          "$ref": "#/$defs/int",
+          "x-since-version": "v0.228.1"
        },
        "new_cluster": {
          "description": "If new_cluster, a description of a new cluster that is created for each run.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec",
+          "x-since-version": "v0.228.1"
        },
        "notebook_task": {
          "description": "The task runs a notebook when the `notebook_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask",
+          "x-since-version": "v0.228.1"
        },
        "notification_settings": {
          "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings",
+          "x-since-version": "v0.228.1"
        },
        "pipeline_task": {
          "description": "The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask",
+          "x-since-version": "v0.228.1"
        },
        "power_bi_task": {
          "description": "The task triggers a Power BI semantic model update when the `power_bi_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask",
+          "x-since-version": "v0.248.0"
        },
        "python_wheel_task": {
          "description": "The task runs a Python wheel when the `python_wheel_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask",
+          "x-since-version": "v0.228.1"
        },
        "retry_on_timeout": {
          "description": "An optional policy to specify whether to retry a job when it times out. 
The default behavior\nis to not retry on timeout.",
-          "$ref": "#/$defs/bool"
+          "$ref": "#/$defs/bool",
+          "x-since-version": "v0.228.1"
        },
        "run_if": {
          "description": "An optional value specifying the condition determining whether the task is run once its dependencies have been completed.\n\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: All dependencies have failed",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf",
+          "x-since-version": "v0.228.1"
        },
        "run_job_task": {
          "description": "The task triggers another job when the `run_job_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask",
+          "x-since-version": "v0.228.1"
        },
        "spark_jar_task": {
          "description": "The task runs a JAR when the `spark_jar_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask",
+          "x-since-version": "v0.228.1"
        },
        "spark_python_task": {
          "description": "The task runs a Python file when the `spark_python_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask",
+          "x-since-version": "v0.228.1"
        },
        "spark_submit_task": {
          "description": "(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).",
          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask",
          "deprecationMessage": "This field is deprecated",
+          "x-since-version": "v0.228.1",
          "deprecated": true
        },
        "sql_task": {
          "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.",
-          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask"
+          "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask",
+          "x-since-version": "v0.228.1"
        },
        "task_key": {
          "description": "A unique name for the task. This field is used to refer to this task from other tasks.\nThis field is required and must be unique within its parent job.\nOn Update or Reset, this field is used to reference the tasks to be updated or reset.",
-          "$ref": "#/$defs/string"
+          "$ref": "#/$defs/string",
+          "x-since-version": "v0.228.1"
        },
        "timeout_seconds": {
          "description": "An optional timeout applied to each run of this job task. A value of `0` means no timeout.",
-          "$ref": "#/$defs/int"
+          "$ref": "#/$defs/int",
+          "x-since-version": "v0.228.1"
        },
        "webhook_notifications": {
          "description": "A collection of system notification IDs to notify when runs of this task begin or complete. 
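The task-level `depends_on` and `run_if` fields above combine as in this sketch; task keys and paths are hypothetical, and `ALL_DONE` is taken from the enum listed in the `run_if` description:

    tasks:
      - task_key: etl
        notebook_task:
          notebook_path: /Workspace/etl       # hypothetical
      - task_key: cleanup
        depends_on:
          - task_key: etl
        run_if: ALL_DONE                      # run once dependencies finish, pass or fail
        notebook_task:
          notebook_path: /Workspace/cleanup   # hypothetical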
The default behavior is to not send any system notifications.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6696,11 +7394,13 @@ "properties": { "outcome": { "description": "Can only be specified on condition task dependencies. The outcome of the dependent task that must be met for this task to run.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "task_key": { "description": "The name of the task this task depends on.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6723,27 +7423,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6761,15 +7467,18 @@ "properties": { "alert_on_last_attempt": { "description": "If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6803,23 +7512,28 @@ "properties": { "file_arrival": { "description": "File arrival trigger settings.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration", + "x-since-version": "v0.228.1" }, "model": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.279.0" }, "pause_status": { "description": "Whether this trigger is paused or not.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", + "x-since-version": "v0.228.1" }, "periodic": { "description": "Periodic trigger settings.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration", + "x-since-version": "v0.228.1" }, "table_update": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6836,7 +7550,8 @@ "type": "object", "properties": { "id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -6857,23 +7572,28 @@ "properties": { "on_duration_warning_threshold_exceeded": { "description": "An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", + "x-since-version": "v0.228.1" }, "on_failure": { "description": "An optional list of system notification IDs to call when the run fails. 
A maximum of 3 destinations can be specified for the `on_failure` property.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", + "x-since-version": "v0.228.1" }, "on_start": { "description": "An optional list of system notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the `on_start` property.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", + "x-since-version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", + "x-since-version": "v0.228.1" }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6892,11 +7612,13 @@ "properties": { "key": { "description": "The tag key.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "value": { "description": "The tag value.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6915,11 +7637,13 @@ "properties": { "key": { "description": "The tag key.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "value": { "description": "The tag value.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -6939,7 +7663,8 @@ "description": "Source catalog for initial connection.\nThis is necessary for schema exploration in some database systems like Oracle, and optional but nice-to-have\nin some other database systems like Postgres.\nFor Oracle databases, this maps to a service name.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ -6956,10 +7681,12 @@ "type": "object", "properties": { "quartz_cron_schedule": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "timezone_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7014,15 +7741,18 @@ "properties": { "catalog": { "description": "The UC catalog the event log is published under.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", 
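Per the `Webhook` and `WebhookNotifications` definitions above, each hook is a list of objects carrying a destination `id`, capped at 3 per property; a sketch with a hypothetical ID:

    webhook_notifications:
      on_failure:
        - id: 0a1b2c3d-4e5f-6789   # hypothetical notification destination ID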
+ "x-since-version": "v0.246.0" }, "name": { "description": "The name the event log is published to in UC.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" }, "schema": { "description": "The UC schema the event log is published under.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" } }, "additionalProperties": false @@ -7040,7 +7770,8 @@ "properties": { "path": { "description": "The absolute path of the source code.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7058,11 +7789,13 @@ "properties": { "exclude": { "description": "Paths to exclude.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "include": { "description": "Paths to include.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7080,15 +7813,18 @@ "properties": { "report": { "description": "Select a specific source report.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec", + "x-since-version": "v0.231.0" }, "schema": { "description": "Select all tables from a specific source schema.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec", + "x-since-version": "v0.228.1" }, "table": { "description": "Select a specific source table.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7108,29 +7844,35 @@ "description": "[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true }, "connection_name": { "description": "Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.234.0" }, "connection_parameters": { "description": "Optional, Internal. Parameters required to establish an initial connection with the source.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.279.0" }, "gateway_storage_catalog": { "description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "gateway_storage_name": { "description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nSpark Declarative Pipelines system will automatically create the storage location under the catalog and schema.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "gateway_storage_schema": { "description": "Required, Immutable. 
The name of the schema for the gateway pipeline's storage location.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -7153,34 +7895,41 @@ "properties": { "connection_name": { "description": "Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "ingest_from_uc_foreign_catalog": { "description": "Immutable. If set to true, the pipeline will ingest tables from the\nUC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.\nThe `source_catalog` fields in objects of IngestionConfig are interpreted as\nthe UC foreign catalogs to ingest from.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.279.0" }, "ingestion_gateway_id": { "description": "Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "netsuite_jar_path": { "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.271.0" }, "objects": { "description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig", + "x-since-version": "v0.228.1" }, "source_configurations": { "description": "Top-level source configurations", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig", + "x-since-version": "v0.267.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7201,19 +7950,22 @@ "description": "The names of the monotonically increasing columns in the source table that are used to enable\nthe table to be read and ingested incrementally through structured streaming.\nThe columns are allowed to have repeated values but have to be non-decreasing.\nIf the source data is merged into the destination (e.g., using SCD Type 1 or Type 2), these\ncolumns will implicitly define the `sequence_by` behavior. 
You can still explicitly set\n`sequence_by` to override this default.", "$ref": "#/$defs/slice/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.264.0" }, "deletion_condition": { "description": "Specifies a SQL WHERE condition that specifies that the source row has been deleted.\nThis is sometimes referred to as \"soft-deletes\".\nFor example: \"Operation = 'DELETE'\" or \"is_deleted = true\".\nThis field is orthogonal to `hard_deletion_sync_interval_in_seconds`,\none for soft-deletes and the other for hard-deletes.\nSee also the hard_deletion_sync_min_interval_in_seconds field for\nhandling of \"hard deletes\" where the source rows are physically removed from the table.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.264.0" }, "hard_deletion_sync_min_interval_in_seconds": { "description": "Specifies the minimum interval (in seconds) between snapshots on primary keys\nfor detecting and synchronizing hard deletions—i.e., rows that have been\nphysically removed from the source table.\nThis interval acts as a lower bound. If ingestion runs less frequently than\nthis value, hard deletion synchronization will align with the actual ingestion\nfrequency instead of happening more often.\nIf not set, hard deletion synchronization via snapshots is disabled.\nThis field is mutable and can be updated without triggering a full snapshot.", "$ref": "#/$defs/int64", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.264.0" } }, "additionalProperties": false @@ -7233,16 +7985,19 @@ "description": "(Optional) Marks the report as incremental.\nThis field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now\ncontrolled by the `parameters` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.271.0", "deprecated": true }, "parameters": { "description": "Parameters for the Workday report. Each key represents the parameter name (e.g., \"start_date\", \"end_date\"),\nand the corresponding value is a SQL-like expression used to compute the parameter value at runtime.\nExample:\n{\n\"start_date\": \"{ coalesce(current_offset(), date(\\\"2025-02-01\\\")) }\",\n\"end_date\": \"{ current_date() - INTERVAL 1 DAY }\"\n}", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.271.0" }, "report_parameters": { "description": "(Optional) Additional custom parameters for Workday Report\nThis field is deprecated and should not be used. Use `parameters` instead.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.271.0", "deprecated": true } }, @@ -7261,11 +8016,13 @@ "properties": { "key": { "description": "Key for the report parameter, can be a column name or other metadata", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.271.0" }, "value": { "description": "Value for the report parameter.\nPossible values it can take are these sql functions:\n1. coalesce(current_offset(), date(\"YYYY-MM-DD\")) -\u003e if current_offset() is null, then the passed date, else current_offset()\n2. current_date()\n3. 
date_sub(current_date(), x) -\u003e subtract x (some non-negative integer) days from current date", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.271.0" } }, "additionalProperties": false @@ -7323,7 +8080,8 @@ "properties": { "path": { "description": "The absolute path of the source code.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7341,11 +8099,13 @@ "properties": { "alerts": { "description": "A list of alerts that trigger the sending of notifications to the configured\ndestinations. The supported alerts are:\n\n* `on-update-success`: A pipeline update completes successfully.\n* `on-update-failure`: Each time a pipeline update fails.\n* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.\n* `on-flow-failure`: A single data flow fails.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "email_recipients": { "description": "A list of email addresses notified when a configured alert is triggered.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7363,7 +8123,8 @@ "properties": { "include": { "description": "The source code to include for pipelines", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" } }, "additionalProperties": false @@ -7381,79 +8142,98 @@ "properties": { "apply_policy_default_values": { "description": "Note: This field won't be persisted. Only API users will check this field.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale", + "x-since-version": "v0.228.1" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", + "x-since-version": "v0.228.1" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", + "x-since-version": "v0.228.1" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nOnly dbfs destinations are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. 
The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", + "x-since-version": "v0.228.1" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "enable_local_disk_encryption": { "description": "Whether to enable local disk encryption for the cluster.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", + "x-since-version": "v0.228.1" }, "init_scripts": { "description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", + "x-since-version": "v0.228.1" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "label": { "description": "A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. 
A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nSee :method:clusters/create for more details.", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7471,15 +8251,18 @@ "properties": { "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "min_workers": { "description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "mode": { "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. 
The legacy autoscaling feature is used for `maintenance`\nclusters.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -7517,11 +8300,13 @@ "properties": { "kind": { "description": "The deployment method that manages the pipeline.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind", + "x-since-version": "v0.228.1" }, "metadata_file_path": { "description": "The path to the file containing metadata about the deployment.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -7542,32 +8327,38 @@ "properties": { "file": { "description": "The path to a file that defines a pipeline and is stored in the Databricks Repos.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary", + "x-since-version": "v0.228.1" }, "glob": { "description": "The unified field to include source codes.\nEach entry can be a notebook path, a file path, or a folder path that ends `/**`.\nThis field cannot be used together with `notebook` or `file`.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern", + "x-since-version": "v0.252.0" }, "jar": { "description": "URI of the jar to be installed. Currently only DBFS is supported.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "maven": { "description": "Specification of a maven library to be installed.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "notebook": { "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", + "x-since-version": "v0.228.1" }, "whl": { "description": "URI of the whl to be installed.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.228.1", "deprecated": true } }, @@ -7585,10 +8376,12 @@ "type": "object", "properties": { "cron": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger", + "x-since-version": "v0.228.1" }, "manual": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7607,7 +8400,8 @@ "properties": { "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line 
https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.257.0" } }, "additionalProperties": false @@ -7626,7 +8420,8 @@ "properties": { "slot_config": { "description": "Optional. The Postgres slot configuration to use for logical replication", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig", + "x-since-version": "v0.267.0" } }, "additionalProperties": false @@ -7645,11 +8440,13 @@ "properties": { "publication_name": { "description": "The name of the publication to use for the Postgres source", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.267.0" }, "slot_name": { "description": "The name of the logical replication slot to use for the Postgres source", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.267.0" } }, "additionalProperties": false @@ -7667,23 +8464,28 @@ "properties": { "destination_catalog": { "description": "Required. Destination catalog to store table.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.231.0" }, "destination_schema": { "description": "Required. Destination schema to store table.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.231.0" }, "destination_table": { "description": "Required. Destination table name. The pipeline fails if a table with that name already exists.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.231.0" }, "source_url": { "description": "Required. Report URL in the source system.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.231.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", + "x-since-version": "v0.231.0" } }, "additionalProperties": false, @@ -7706,15 +8508,18 @@ "properties": { "days_of_week": { "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek", + "x-since-version": "v0.234.0" }, "start_hour": { "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.234.0" }, "time_zone_id": { "description": "Time zone id of restart window. 
See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.\nIf not specified, UTC will be used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.234.0" } }, "additionalProperties": false, @@ -7736,11 +8541,13 @@ "properties": { "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.241.0" }, "user_name": { "description": "The email of an active workspace user. Users can only set this field to their own email.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.241.0" } }, "additionalProperties": false @@ -7758,23 +8565,28 @@ "properties": { "destination_catalog": { "description": "Required. Destination catalog to store tables.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "destination_schema": { "description": "Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails if a table with the same name already exists.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source_catalog": { "description": "The source catalog name. Might be optional depending on the type of source.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source_schema": { "description": "Required. Schema name in the source database.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -7798,11 +8610,13 @@ "properties": { "postgres": { "description": "Postgres-specific catalog-level configuration parameters", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig", + "x-since-version": "v0.267.0" }, "source_catalog": { "description": "Source catalog name", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.267.0" } }, "additionalProperties": false @@ -7820,7 +8634,8 @@ "properties": { "catalog": { "description": "Catalog-level source configuration parameters", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig", + "x-since-version": "v0.267.0" } }, "additionalProperties": false @@ -7838,31 +8653,38 @@ "properties": { "destination_catalog": { "description": "Required. Destination catalog to store table.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "destination_schema": { "description": "Required. 
Destination schema to store table.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "destination_table": { "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source_catalog": { "description": "Source catalog name. Might be optional depending on the type of source.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source_schema": { "description": "Schema name in the source database. Might be optional depending on the type of source.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "source_table": { "description": "Required. Table name in the source database.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -7885,42 +8707,50 @@ "properties": { "exclude_columns": { "description": "A list of column names to be excluded for the ingestion.\nWhen not specified, include_columns fully controls what columns to be ingested.\nWhen specified, all other columns including future ones will be automatically included for ingestion.\nThis field is mutually exclusive with `include_columns`.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.251.0" }, "include_columns": { "description": "A list of column names to be included for the ingestion.\nWhen not specified, all columns except ones in exclude_columns will be included. Future\ncolumns will be automatically included.\nWhen specified, all other future columns will be automatically excluded from ingestion.\nThis field is mutually exclusive with `exclude_columns`.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.251.0" }, "primary_keys": { "description": "The primary key of the table used to apply changes.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.228.1" }, "query_based_connector_config": { "description": "Configurations that are only applicable for query-based ingestion connectors.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.264.0" }, "salesforce_include_formula_fields": { "description": "If true, formula fields defined in the table are included in the ingestion. 
This setting is only valid for the Salesforce connector", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "scd_type": { "description": "The SCD type to use to ingest the table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.228.1" }, "sequence_by": { "description": "The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.231.0" }, "workday_report_parameters": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters", "x-databricks-preview": "PRIVATE", - "doNotSuggest": true + "doNotSuggest": true, + "x-since-version": "v0.271.0" } }, "additionalProperties": false @@ -7955,11 +8785,13 @@ "properties": { "ai21labs_api_key": { "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "ai21labs_api_key_plaintext": { "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -7977,23 +8809,28 @@ "properties": { "fallback_config": { "description": "Configuration for traffic fallback which auto fallbacks to other served entities if the request to a served\nentity fails with certain error codes, to increase availability.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig", + "x-since-version": "v0.246.0" }, "guardrails": { "description": "Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails", + "x-since-version": "v0.230.0" }, "inference_table_config": { "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig", + "x-since-version": "v0.230.0" }, "rate_limits": { "description": "Configuration for rate limits which can be set to limit endpoint traffic.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit" + "$ref": 
"#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit", + "x-since-version": "v0.230.0" }, "usage_tracking_config": { "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig", + "x-since-version": "v0.230.0" } }, "additionalProperties": false @@ -8013,20 +8850,24 @@ "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.230.0", "deprecated": true }, "pii": { "description": "Configuration for guardrail PII filter.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior", + "x-since-version": "v0.230.0" }, "safety": { "description": "Indicates whether the safety filter is enabled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.230.0" }, "valid_topics": { "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", + "x-since-version": "v0.230.0", "deprecated": true } }, @@ -8045,7 +8886,8 @@ "properties": { "behavior": { "description": "Configuration for input guardrail filters.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior", + "x-since-version": "v0.230.0" } }, "additionalProperties": false @@ -8079,11 +8921,13 @@ "properties": { "input": { "description": "Configuration for input guardrail filters.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", + "x-since-version": "v0.230.0" }, "output": { "description": "Configuration for output guardrail filters.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", + "x-since-version": "v0.230.0" } }, "additionalProperties": false @@ -8101,19 +8945,23 @@ "properties": { "catalog_name": { "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" }, "enabled": { "description": "Indicates whether the inference table is enabled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.230.0" }, "schema_name": { "description": "The name of the schema in Unity Catalog. 
Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.230.0" } }, "additionalProperties": false @@ -8131,23 +8979,28 @@ "properties": { "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.230.0" }, "key": { "description": "Key field for a rate limit. Currently, 'user', 'user_group', 'service_principal', and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey", + "x-since-version": "v0.230.0" }, "principal": { "description": "Principal field for a user, user group, or service principal to apply rate limiting to. Accepts a user email, group name, or service principal application ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "renewal_period": { "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod", + "x-since-version": "v0.230.0" }, "tokens": { "description": "Used to specify how many tokens are allowed for a key within the renewal_period.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.265.0" } }, "additionalProperties": false, @@ -8199,7 +9052,8 @@ "properties": { "enabled": { "description": "Whether to enable usage tracking.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.230.0" } }, "additionalProperties": false @@ -8217,31 +9071,38 @@ "properties": { "aws_access_key_id": { "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "aws_access_key_id_plaintext": { "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "aws_region": { "description": "The AWS region to use. 
Bedrock has to be enabled there.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "aws_secret_access_key": { "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "aws_secret_access_key_plaintext": { "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "bedrock_provider": { "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider", + "x-since-version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the external model will use to access AWS resources.\nYou must authenticate using an instance profile or access keys.\nIf you prefer to authenticate using access keys, see `aws_access_key_id`,\n`aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.243.0" } }, "additionalProperties": false, @@ -8280,11 +9141,13 @@ "properties": { "anthropic_api_key": { "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "anthropic_api_key_plaintext": { "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8302,15 +9165,18 @@ "properties": { "key": { "description": "The name of the API key parameter used for authentication.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" }, "value": { "description": "The Databricks secret key reference for an API Key.\nIf you prefer to paste your token directly, see `value_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" }, "value_plaintext": { "description": "The API Key provided as a plaintext string. 
If you prefer to reference your\ntoken using Databricks Secrets, see `value`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" } }, "additionalProperties": false, @@ -8331,19 +9197,23 @@ "properties": { "catalog_name": { "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "enabled": { "description": "Indicates whether the inference table is enabled.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "schema_name": { "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8361,11 +9231,13 @@ "properties": { "token": { "description": "The Databricks secret key reference for a token.\nIf you prefer to paste your token directly, see `token_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" }, "token_plaintext": { "description": "The token provided as a plaintext string. If you prefer to reference your\ntoken using Databricks Secrets, see `token`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" } }, "additionalProperties": false @@ -8383,15 +9255,18 @@ "properties": { "cohere_api_base": { "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "cohere_api_key": { "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "cohere_api_key_plaintext": { "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. 
You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8410,15 +9285,18 @@ "properties": { "api_key_auth": { "description": "This is a field to provide API key authentication for the custom provider API.\nYou can only specify one authentication method.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth", + "x-since-version": "v0.246.0" }, "bearer_token_auth": { "description": "This is a field to provide bearer token authentication for the custom provider API.\nYou can only specify one authentication method.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth", + "x-since-version": "v0.246.0" }, "custom_provider_url": { "description": "This is a field to provide the URL of the custom provider API.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.246.0" } }, "additionalProperties": false, @@ -8439,15 +9317,18 @@ "properties": { "databricks_api_token": { "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "databricks_api_token_plaintext": { "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. 
You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "databricks_workspace_url": { "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8468,11 +9349,13 @@ "properties": { "on_update_failure": { "description": "A list of email addresses to be notified when an endpoint fails to update its configuration or state.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.264.0" }, "on_update_success": { "description": "A list of email addresses to be notified when an endpoint successfully updates its configuration or state.", - "$ref": "#/$defs/slice/string" + "$ref": "#/$defs/slice/string", + "x-since-version": "v0.264.0" } }, "additionalProperties": false @@ -8490,19 +9373,23 @@ "properties": { "auto_capture_config": { "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput", + "x-since-version": "v0.228.1" }, "served_entities": { "description": "The list of served entities under the serving endpoint config.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput", + "x-since-version": "v0.228.1" }, "served_models": { "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput", + "x-since-version": "v0.228.1" }, "traffic_config": { "description": "The traffic configuration associated with the serving endpoint config.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8520,11 +9407,13 @@ "properties": { "key": { "description": "Key field for a serving endpoint tag.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "value": { "description": "Optional value field for a serving endpoint tag.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8545,51 +9434,63 @@ "properties": { "ai21labs_config": { "description": "AI21Labs Config. 
Only required if the provider is 'ai21labs'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig", + "x-since-version": "v0.228.1" }, "amazon_bedrock_config": { "description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig", + "x-since-version": "v0.228.1" }, "anthropic_config": { "description": "Anthropic Config. Only required if the provider is 'anthropic'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig", + "x-since-version": "v0.228.1" }, "cohere_config": { "description": "Cohere Config. Only required if the provider is 'cohere'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig", + "x-since-version": "v0.228.1" }, "custom_provider_config": { "description": "Custom Provider Config. Only required if the provider is 'custom'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig", + "x-since-version": "v0.246.0" }, "databricks_model_serving_config": { "description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig", + "x-since-version": "v0.228.1" }, "google_cloud_vertex_ai_config": { "description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig", + "x-since-version": "v0.228.1" }, "name": { "description": "The name of the external model.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_config": { "description": "OpenAI Config. Only required if the provider is 'openai'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig", + "x-since-version": "v0.228.1" }, "palm_config": { "description": "PaLM Config. Only required if the provider is 'palm'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig", + "x-since-version": "v0.228.1" }, "provider": { "description": "The name of the provider for the external model. 
Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider", + "x-since-version": "v0.228.1" }, "task": { "description": "The task type of the external model.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8634,7 +9535,8 @@ "properties": { "enabled": { "description": "Whether to enable traffic fallback. When a served entity in the serving endpoint returns specific error\ncodes (e.g. 500), the request will automatically be round-robin attempted with other served entities in the same\nendpoint, following the order of served entity list, until a successful response is returned.\nIf all attempts fail, return the last response with the error code.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.246.0" } }, "additionalProperties": false, @@ -8655,19 +9557,23 @@ "properties": { "private_key": { "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "private_key_plaintext": { "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "project_id": { "description": "This is the Google Cloud project id that the service account is\nassociated with.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "region": { "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8690,47 +9596,58 @@ "properties": { "microsoft_entra_client_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "microsoft_entra_client_secret": { "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. 
If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "microsoft_entra_client_secret_plaintext": { "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "microsoft_entra_tenant_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_api_base": { "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_api_key": { "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_api_key_plaintext": { "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_api_type": { "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_api_version": { "description": "This is an optional field to specify the OpenAI API version. 
For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_deployment_name": { "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "openai_organization": { "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8748,11 +9665,13 @@ "properties": { "palm_api_key": { "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "palm_api_key_plaintext": { "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8770,15 +9689,18 @@ "properties": { "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.228.1" }, "key": { "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey", + "x-since-version": "v0.228.1" }, "renewal_period": { "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8828,15 +9750,18 @@ "type": "object", "properties": { "served_entity_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "served_model_name": { "description": "The name of the served model this route configures traffic for.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "traffic_percentage": { "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -8857,58 +9782,72 @@ "properties": { "entity_name": { "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. 
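The external-model definitions above (the provider-specific config blocks plus `provider` and `task`) compose in bundle YAML roughly as follows; a hedged sketch with hypothetical endpoint, model, and secret names:

```yaml
resources:
  model_serving_endpoints:
    openai_proxy:                               # hypothetical resource name
      name: openai-proxy
      config:
        served_entities:
          - name: chat
            external_model:
              name: gpt-4o                      # hypothetical external model name
              provider: openai
              task: llm/v1/chat
              openai_config:
                openai_api_key: "{{secrets/my_scope/openai_key}}"   # hypothetical secret reference
        traffic_config:
          routes:
            - served_model_name: chat
              traffic_percentage: 100
```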
If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "entity_version": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "external_model": { "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel", + "x-since-version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' 
and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "provisioned_model_units": { "description": "The number of model units provisioned.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -8926,53 +9865,66 @@ "properties": { "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", - "$ref": "#/$defs/map/string" + "$ref": "#/$defs/map/string", + "x-since-version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. 
Do not use if workload_size is specified.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.228.1" }, "model_name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "model_version": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "provisioned_model_units": { "description": "The number of model units provisioned.", - "$ref": "#/$defs/int64" + "$ref": "#/$defs/int64", + "x-since-version": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.228.1" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.228.1" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. 
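The provisioned-concurrency fields introduced in v0.256.0 sit alongside the original v0.228.1 sizing fields. A fragment of a `model_serving_endpoints` entry illustrating them (entity name and version hypothetical):

```yaml
config:
  served_entities:
    - entity_name: main.default.ranker          # hypothetical UC model
      entity_version: "2"
      scale_to_zero_enabled: true
      min_provisioned_concurrency: 0
      max_provisioned_concurrency: 8            # mutually exclusive with workload_size
```

Per the descriptions above, `min_provisioned_concurrency`/`max_provisioned_concurrency` and `workload_size` are two mutually exclusive ways of sizing the same served entity.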
See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType", + "x-since-version": "v0.228.1" } }, "additionalProperties": false, @@ -9033,7 +9985,8 @@ "properties": { "routes": { "description": "The list of routes that define traffic to each served entity.", - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route", + "x-since-version": "v0.228.1" } }, "additionalProperties": false @@ -9105,23 +10058,28 @@ "properties": { "comparison_operator": { "description": "Operator used for comparison in alert evaluation.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator", + "x-since-version": "v0.279.0" }, "empty_result_state": { "description": "Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because `UNKNOWN` state is planned to be deprecated.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState", + "x-since-version": "v0.279.0" }, "notification": { "description": "User or Notification Destination to notify when alert is triggered.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification", + "x-since-version": "v0.279.0" }, "source": { "description": "Source column from result to use to evaluate alert", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", + "x-since-version": "v0.279.0" }, "threshold": { "description": "Threshold to user for alert evaluation, can be a column or a value.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -9143,14 +10101,17 @@ "properties": { "notify_on_ok": { "description": "Whether to notify alert subscribers when alert returns back to normal.", - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.279.0" }, "retrigger_seconds": { "description": "Number of seconds an alert waits after being triggered before it is allowed to send another notification.\nIf set to 0 or omitted, the alert will not send any further notifications after the first trigger\nSetting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int", + "x-since-version": "v0.279.0" }, "subscriptions": { - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription", + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ 
-9167,10 +10128,12 @@ "type": "object", "properties": { "column": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", + "x-since-version": "v0.279.0" }, "value": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue", + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ -9187,13 +10150,16 @@ "type": "object", "properties": { "aggregation": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Aggregation" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Aggregation", + "x-since-version": "v0.279.0" }, "display": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "name": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -9213,13 +10179,16 @@ "type": "object", "properties": { "bool_value": { - "$ref": "#/$defs/bool" + "$ref": "#/$defs/bool", + "x-since-version": "v0.279.0" }, "double_value": { - "$ref": "#/$defs/float64" + "$ref": "#/$defs/float64", + "x-since-version": "v0.279.0" }, "string_value": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ -9237,11 +10206,13 @@ "properties": { "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "user_name": { "description": "The email of an active workspace user. Can only set this field to their own email.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ -9258,10 +10229,12 @@ "type": "object", "properties": { "destination_id": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "user_email": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false @@ -9279,10 +10252,12 @@ "description": "Configures the channel name and DBSQL version of the warehouse. 
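The `AlertV2Evaluation`, `AlertV2Operand*`, and subscription definitions above combine into an alert resource along these lines; a speculative sketch, assuming the bundle resource key is `alerts` and that `display_name`, `query_text`, and `warehouse_id` are top-level alert fields (all concrete values hypothetical):

```yaml
resources:
  alerts:
    high_error_rate:                            # hypothetical resource name
      display_name: High error rate
      query_text: SELECT error_rate FROM main.default.metrics   # hypothetical query
      warehouse_id: ${var.warehouse_id}         # hypothetical variable
      evaluation:
        comparison_operator: GREATER_THAN
        source:
          name: error_rate
        notification:
          notify_on_ok: true
          subscriptions:
            - user_email: ops@example.com       # hypothetical subscriber
        threshold:
          value:
            double_value: 0.05
```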
CHANNEL_NAME_CUSTOM should be chosen only when `dbsql_version` is specified.", "properties": { "dbsql_version": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "name": { - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ChannelName" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ChannelName", + "x-since-version": "v0.260.0" } }, "additionalProperties": false @@ -9354,15 +10329,18 @@ "properties": { "pause_status": { "description": "Indicate whether this schedule is paused or not.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus" + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus", + "x-since-version": "v0.279.0" }, "quartz_cron_schedule": { "description": "A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" }, "timezone_id": { "description": "A Java timezone id. The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.279.0" } }, "additionalProperties": false, @@ -9383,10 +10361,12 @@ "type": "object", "properties": { "key": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" }, "value": { - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.260.0" } }, "additionalProperties": false @@ -9403,7 +10383,8 @@ "type": "object", "properties": { "custom_tags": { - "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair" + "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair", + "x-since-version": "v0.260.0" } }, "additionalProperties": false @@ -9454,11 +10435,13 @@ "properties": { "dns_name": { "description": "The DNS of the KeyVault", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" }, "resource_id": { "description": "The resource id of the azure KeyVault that user wants to associate the scope with.", - "$ref": "#/$defs/string" + "$ref": "#/$defs/string", + "x-since-version": "v0.252.0" } }, "additionalProperties": false, @@ -10691,71 +11674,86 @@ "artifacts": { "description": "Defines the attributes to build an artifact", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", - "markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html)." + "markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. 
For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html).", + "x-since-version": "v0.228.1" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", - "markdownDescription": "The bundle attributes when deploying to this target," + "markdownDescription": "The bundle attributes when deploying to this target,", + "x-since-version": "v0.228.1" }, "environments": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "deprecationMessage": "Deprecated: please use targets instead", + "x-since-version": "v0.243.0", "deprecated": true }, "experimental": { "description": "Defines attributes for experimental features.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental", + "x-since-version": "v0.228.1" }, "include": { "description": "Specifies a list of path globs that contain configuration files to include within the bundle.", "$ref": "#/$defs/slice/string", - "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include)." + "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", + "x-since-version": "v0.228.1" }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", - "markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)." + "markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html).", + "x-since-version": "v0.228.1" }, "presets": { "description": "Defines bundle deployment presets.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", - "markdownDescription": "Defines bundle deployment presets. See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets)." + "markdownDescription": "Defines bundle deployment presets. 
See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets).", + "x-since-version": "v0.228.1" }, "python": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", + "x-since-version": "v0.275.0" }, "resources": { "description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", - "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```" + "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```", + "x-since-version": "v0.228.1" }, "run_as": { "description": "The identity to use when running Databricks Asset Bundles workflows.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", - "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)." + "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", + "x-since-version": "v0.228.1" }, "scripts": { - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Script" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Script", + "x-since-version": "v0.259.0" }, "sync": { "description": "The files and file paths to include or exclude in the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", - "markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync)." + "markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync).", + "x-since-version": "v0.228.1" }, "targets": { "description": "Defines deployment targets for the bundle.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", - "markdownDescription": "Defines deployment targets for the bundle. See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)" + "markdownDescription": "Defines deployment targets for the bundle. 
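Taken together, the root-level properties annotated above (most tagged v0.228.1, with `scripts`, `python`, and a few others arriving later) describe a top-level bundle file such as this minimal sketch; bundle name, prefix, and host are hypothetical:

```yaml
bundle:
  name: my_bundle                               # hypothetical bundle name

include:
  - resources/*.yml

presets:
  name_prefix: "[dev] "                         # hypothetical prefix

targets:
  dev:
    mode: development
    default: true
    workspace:
      host: https://example.cloud.databricks.com   # hypothetical host
```

As the `markdownDescription` strings note, most of these root-level settings can be overridden per target inside the `targets` mapping.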
See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)", + "x-since-version": "v0.228.1" }, "variables": { "description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable" + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable", + "x-since-version": "v0.228.1" }, "workspace": { "description": "Defines the Databricks workspace for the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", - "markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace)." + "markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace).", + "x-since-version": "v0.228.1" } }, "additionalProperties": {} diff --git a/libs/jsonschema/extension.go b/libs/jsonschema/extension.go index 55ee4748d3..b5d122b6d8 100644 --- a/libs/jsonschema/extension.go +++ b/libs/jsonschema/extension.go @@ -65,4 +65,7 @@ type Extension struct { // FieldBehaviors defines field behaviors from the OpenAPI spec (e.g. OUTPUT_ONLY) // This is used to filter out fields that should not be exposed to users FieldBehaviors []string `json:"x-databricks-field-behaviors,omitempty"` + + // SinceVersion indicates which CLI version introduced this field. + SinceVersion string `json:"x-since-version,omitempty"` } From 96a74f592e0a6fd8288fe04d89f3e53524109d99 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 14 Jan 2026 02:38:47 +0100 Subject: [PATCH 2/8] to revert - manual test --- bundle/schema/jsonschema.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index d5d29d5c9b..96de006433 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -11700,6 +11700,9 @@ "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", "x-since-version": "v0.228.1" }, + "new_test_field": { + "$ref": "#/$defs/string" + }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", From ce12a3864a06e31894d982a9019a3d84e79c747a Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 14 Jan 2026 02:39:43 +0100 Subject: [PATCH 3/8] Revert "to revert - manual test" This reverts commit 96a74f592e0a6fd8288fe04d89f3e53524109d99. --- bundle/schema/jsonschema.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 96de006433..d5d29d5c9b 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -11700,9 +11700,6 @@ "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. 
See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", "x-since-version": "v0.228.1" }, - "new_test_field": { - "$ref": "#/$defs/string" - }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", From 5866c2977d59323781351158bc4f3d972ca2127c Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 14 Jan 2026 02:43:04 +0100 Subject: [PATCH 4/8] merge --- bundle/schema/jsonschema.json | 1996 ++++++++++++++++----------------- libs/jsonschema/extension.go | 2 +- 2 files changed, 999 insertions(+), 999 deletions(-) diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index d5d29d5c9b..842ffb03db 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -66,15 +66,15 @@ "properties": { "custom_description": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "custom_summary": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "display_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "evaluation": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation" @@ -84,23 +84,23 @@ }, "parent_path": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AlertPermission" }, "query_text": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "run_as_user_name": { "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.279.0", + "since_version": "v0.279.0", "deprecated": true }, "schedule": { @@ -108,7 +108,7 @@ }, "warehouse_id": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -133,19 +133,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -190,7 +190,7 @@ "description": { "description": "The description of the app.", "$ref": "#/$defs/string", - "x-since-version": "v0.239.0" + "since_version": "v0.239.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -199,7 +199,7 @@ "name": { "description": "The name of the app. 
The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.", "$ref": "#/$defs/string", - "x-since-version": "v0.239.0" + "since_version": "v0.239.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AppPermission" @@ -207,7 +207,7 @@ "resources": { "description": "Resources for the app.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource", - "x-since-version": "v0.239.0" + "since_version": "v0.239.0" }, "source_code_path": { "$ref": "#/$defs/string" @@ -235,19 +235,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AppPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -285,42 +285,42 @@ "apply_policy_default_values": { "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.", "$ref": "#/$defs/bool", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "autotermination_minutes": { "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.", "$ref": "#/$defs/int", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. 
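For the `apps` schema above, a minimal bundle sketch; resource names, IDs, and paths are hypothetical, and the `sql_warehouse` app-resource shape is assumed from the `apps.AppResource` reference:

```yaml
resources:
  apps:
    dashboard_app:                              # hypothetical resource name
      name: dashboard-app                       # lowercase alphanumerics and hyphens only
      description: Example app
      source_code_path: ./app
      resources:
        - name: warehouse                       # hypothetical app resource
          sql_warehouse:
            id: 1234567890abcdef                # hypothetical warehouse ID
            permission: CAN_USE
```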
The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "cluster_name": { "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "data_security_mode": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode" @@ -331,42 +331,42 @@ "driver_instance_pool_id": { "description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "enable_elastic_disk": { "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.", "$ref": "#/$defs/bool", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "enable_local_disk_encryption": { "description": "Whether to enable LUKS on cluster VMs' local disks", "$ref": "#/$defs/bool", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "init_scripts": { "description": "The configuration for storing init scripts. 
Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "is_single_node": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`", "$ref": "#/$defs/bool", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" }, "kind": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind" @@ -378,12 +378,12 @@ "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ClusterPermission" @@ -391,12 +391,12 @@ "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "remote_disk_throughput": { "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. 
Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" }, "runtime_engine": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine" @@ -404,37 +404,37 @@ "single_user_name": { "description": "Single user name if data_security_mode is `SINGLE_USER`", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "spark_version": { "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.", "$ref": "#/$defs/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "total_initial_remote_disk_size": { "description": "If set, what the total initial volume size (in GB) of the remote disks should be. 
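A minimal sketch of a `clusters` resource touching the fields annotated above; node type and DBR version are hypothetical:

```yaml
resources:
  clusters:
    shared_cluster:                             # hypothetical resource name
      cluster_name: shared-analytics
      spark_version: 15.4.x-scala2.12           # hypothetical DBR version
      node_type_id: i3.xlarge                   # hypothetical node type
      autoscale:
        min_workers: 1
        max_workers: 4
      autotermination_minutes: 60
      custom_tags:
        team: data-platform
```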
Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" }, "use_ml_runtime": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.", "$ref": "#/$defs/bool", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType" @@ -456,19 +456,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ClusterPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -514,12 +514,12 @@ "dataset_catalog": { "description": "Sets the default catalog for all datasets in this dashboard. When set, this overrides the catalog specified in individual dataset definitions.", "$ref": "#/$defs/string", - "x-since-version": "v0.281.0" + "since_version": "v0.281.0" }, "dataset_schema": { "description": "Sets the default schema for all datasets in this dashboard. When set, this overrides the schema specified in individual dataset definitions.", "$ref": "#/$defs/string", - "x-since-version": "v0.281.0" + "since_version": "v0.281.0" }, "display_name": { "description": "The display name of the dashboard.", @@ -583,19 +583,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DashboardPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -637,12 +637,12 @@ "database_instance_name": { "description": "The name of the DatabaseInstance housing the database.", "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "database_name": { "description": "The name of the database (in a instance) associated with the catalog.", "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -651,7 +651,7 @@ "name": { "description": "The name of the catalog in UC.", "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" } }, "additionalProperties": false, @@ -676,22 +676,22 @@ "capacity": { "description": "The sku of the instance. Valid values are \"CU_1\", \"CU_2\", \"CU_4\", \"CU_8\".", "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "custom_tags": { "description": "Custom tags associated with the instance. 
This field is only included on create and update responses.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/database.CustomTag", - "x-since-version": "v0.273.0" + "since_version": "v0.273.0" }, "enable_pg_native_login": { "description": "Whether to enable PG native password login on the instance. Defaults to false.", "$ref": "#/$defs/bool", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" }, "enable_readable_secondaries": { "description": "Whether to enable secondaries to serve read-only traffic. Defaults to false.", "$ref": "#/$defs/bool", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -700,17 +700,17 @@ "name": { "description": "The name of the instance. This is the unique identifier for the instance.", "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "node_count": { "description": "The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to\n1 primary and 0 secondaries. This field is input only, see effective_node_count for the output.", "$ref": "#/$defs/int", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "parent_instance_ref": { "description": "The ref of the parent instance. This is only available if the instance is\nchild instance.\nInput: For specifying the parent instance to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission" @@ -718,17 +718,17 @@ "retention_window_in_days": { "description": "The retention window for the instance. This is the time window in days\nfor which the historical data is retained. The default value is 7 days.\nValid values are 2 to 35 days.", "$ref": "#/$defs/int", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "stopped": { "description": "Whether to stop the instance. 
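A minimal sketch of the corresponding `database_instances` resource, assuming that bundle key; all values are hypothetical:

```yaml
resources:
  database_instances:
    app_db:                                     # hypothetical resource name
      name: app-db-instance
      capacity: CU_2
      node_count: 1
      retention_window_in_days: 7
      stopped: false
```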
An input only param, see effective_stopped for the output.", "$ref": "#/$defs/bool", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "usage_policy_id": { "description": "The desired usage policy to associate with the instance.", "$ref": "#/$defs/string", - "x-since-version": "v0.273.0" + "since_version": "v0.273.0" } }, "additionalProperties": false, @@ -749,19 +749,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermissionLevel", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" } }, "additionalProperties": false, @@ -799,12 +799,12 @@ "principal": { "description": "The name of the principal that will be granted privileges", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "privileges": { "description": "The privileges to grant to the specified entity", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -827,32 +827,32 @@ "budget_policy_id": { "description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload.", "$ref": "#/$defs/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "continuous": { "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "description": { "description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "environments": { "description": "A list of task execution environment specifications that can be referenced by serverless tasks of this job.\nFor serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.\nFor other serverless tasks, the task environment is required to be specified using environment_key in the task settings.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git_source": { "description": "An optional specification for a remote Git repository containing the source code used by tasks. 
Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules" @@ -860,7 +860,7 @@ "job_clusters": { "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -869,27 +869,27 @@ "max_concurrent_runs": { "description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "parameters": { "description": "Job-level parameter definitions", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "performance_target": { "description": "The performance mode on a serverless job. 
This field determines the level of compute performance or cost-efficiency for the run.\nThe performance target does not apply to tasks that run on Serverless GPU compute.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PerformanceTarget", - "x-since-version": "v0.241.0" + "since_version": "v0.241.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.JobPermission" @@ -897,7 +897,7 @@ "queue": { "description": "The queue settings of the job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs" @@ -905,39 +905,39 @@ "schedule": { "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "tags": { "description": "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "tasks": { "description": "A list of task specifications to be executed by this job.\nIt supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit).\nRead endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job. A value of `0` means no timeout.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "trigger": { "description": "A configuration to trigger a run when certain conditions are met. 
The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "usage_policy_id": { "description": "The id of the user specified usage policy to use for this job.\nIf not specified, a default usage policy may be applied when creating or modifying the job.\nSee `effective_usage_policy_id` for the usage policy used by this workload.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" }, "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this job begin or complete.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -956,19 +956,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.JobPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -1007,7 +1007,7 @@ "prevent_destroy": { "description": "Lifecycle setting to prevent the resource from being destroyed.", "$ref": "#/$defs/bool", - "x-since-version": "v0.268.0" + "since_version": "v0.268.0" } }, "additionalProperties": false @@ -1026,7 +1026,7 @@ "artifact_location": { "description": "Location where all artifacts for the experiment are stored.\nIf not provided, the remote server will select an appropriate default.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1035,7 +1035,7 @@ "name": { "description": "Experiment name.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission" @@ -1043,7 +1043,7 @@ "tags": { "description": "A collection of tags to set on the experiment. Maximum tag size and number of tags per request\ndepends on the storage backend. All storage backends are guaranteed to support tag keys up\nto 250 bytes in size and tag values up to 5000 bytes in size. 
All storage backends are also\nguaranteed to support up to 20 tags per request.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1065,19 +1065,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -1115,7 +1115,7 @@ "description": { "description": "Optional description for registered model.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1124,7 +1124,7 @@ "name": { "description": "Register models under this name", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowModelPermission" @@ -1132,7 +1132,7 @@ "tags": { "description": "Additional metadata for registered model.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1154,19 +1154,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModelPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -1206,17 +1206,17 @@ "ai_gateway": { "description": "The AI Gateway configuration for the serving endpoint. 
NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "budget_policy_id": { "description": "The budget policy to be applied to the serving endpoint.", "$ref": "#/$defs/string", - "x-since-version": "v0.244.0" + "since_version": "v0.244.0" }, "config": { "description": "The core config of the serving endpoint.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "description": { "$ref": "#/$defs/string" @@ -1224,7 +1224,7 @@ "email_notifications": { "description": "Email notification settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications", - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1233,7 +1233,7 @@ "name": { "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission" @@ -1242,18 +1242,18 @@ "description": "Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.RateLimit", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "route_optimized": { "description": "Enable route optimization for the serving endpoint.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "tags": { "description": "Tags to be attached to the serving endpoint and automatically propagated to billing logs.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1275,19 +1275,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -1325,22 +1325,22 @@ "group_name": { "description": "The name of the group that has the permission set in level.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "level": { "description": "The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, 
"service_principal_name": { "description": "The name of the service principal that has the permission set in level.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "user_name": { "description": "The name of the user that has the permission set in level.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1362,84 +1362,84 @@ "allow_duplicate_names": { "description": "If false, deployment will fail if name conflicts with that of another pipeline.", "$ref": "#/$defs/bool", - "x-since-version": "v0.261.0" + "since_version": "v0.261.0" }, "budget_policy_id": { "description": "Budget policy of this pipeline.", "$ref": "#/$defs/string", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "catalog": { "description": "A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "channel": { "description": "DLT Release Channel that specifies which version to use.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "clusters": { "description": "Cluster settings for this pipeline deployment.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "configuration": { "description": "String-String configuration for this pipeline execution.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "continuous": { "description": "Whether the pipeline is continuous or triggered. This replaces `trigger`.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "development": { "description": "Whether the pipeline is in Development mode. 
Defaults to false.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "edition": { "description": "Pipeline product edition.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "environment": { "description": "Environment specification for this pipeline used to install dependencies.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" }, "event_log": { "description": "Event log configuration for this pipeline", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "filters": { "description": "Filters on which Pipeline packages to include in the deployed graph.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "gateway_definition": { "description": "The definition of a gateway pipeline to support change data capture.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "id": { "description": "Unique identifier for this pipeline.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "ingestion_definition": { "description": "The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "libraries": { "description": "Libraries or code needed by this deployment.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", @@ -1448,12 +1448,12 @@ "name": { "description": "Friendly identifier for this pipeline.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notifications": { "description": "List of notification settings for this pipeline.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.PipelinePermission" @@ -1461,19 +1461,19 @@ "photon": { "description": "Whether Photon is enabled for this pipeline.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "restart_window": { "description": "Restart window of this pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.234.0" + "since_version": "v0.234.0" }, "root_path": { "description": "Root path for this pipeline.\nThis is used as the root directory when editing the pipeline in the Databricks user interface and it is\nadded to sys.path when executing Python sources during pipeline execution.", "$ref": "#/$defs/string", - "x-since-version": "v0.253.0" + "since_version": "v0.253.0" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs" @@ -1481,28 +1481,28 @@ "schema": { "description": "The default schema (database) where tables are read from or published to.", "$ref": "#/$defs/string", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "serverless": { "description": "Whether serverless compute is enabled for this pipeline.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "storage": { "description": "DBFS root directory for storing checkpoints and tables.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "tags": { "description": "A map of tags associated with the pipeline.\nThese are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations.\nA maximum of 25 tags can be added to the pipeline.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "target": { "description": "Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. 
This legacy field is deprecated for pipeline creation in favor of the `schema` field.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "trigger": { @@ -1516,7 +1516,7 @@ "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.276.0" + "since_version": "v0.276.0" } }, "additionalProperties": false, @@ -1535,19 +1535,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.PipelinePermissionLevel", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.247.0" + "since_version": "v0.247.0" } }, "additionalProperties": false, @@ -1586,24 +1586,24 @@ "assets_dir": { "description": "[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "baseline_table_name": { "description": "[Create:OPT Update:OPT] Baseline table name.\nBaseline data is used to compute drift from the data in the monitored `table_name`.\nThe baseline table and the monitored table shall have the same schema.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "custom_metrics": { "description": "[Create:OPT Update:OPT] Custom metrics.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "data_classification_config": { "description": "[Create:OPT Update:OPT] Data classification related config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "inference_log": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog" @@ -1611,7 +1611,7 @@ "latest_monitor_failure_msg": { "description": "[Create:ERR Update:IGN] The latest error message for a monitor failure.", "$ref": "#/$defs/string", - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1620,32 +1620,32 @@ "notifications": { "description": "[Create:OPT Update:OPT] Field for specifying notification settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "output_schema_name": { "description": "[Create:REQ Update:REQ] Schema where output tables are created. 
Needs to be in 2-level format {catalog}.{schema}", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "schedule": { "description": "[Create:OPT Update:OPT] The monitor schedule.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "skip_builtin_dashboard": { "description": "Whether to skip creating a default dashboard summarizing data quality metrics.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "slicing_exprs": { "description": "[Create:OPT Update:OPT] List of column expressions to slice data with for targeted analysis. The data is grouped by\neach expression independently, resulting in a separate slice for each predicate and its\ncomplements. For example `slicing_exprs=[“col_1”, “col_2 \u003e 10”]` will generate the following\nslices: two slices for `col_2 \u003e 10` (True and False), and one slice per unique value in\n`col1`. For high-cardinality columns, only the top 100 unique values by frequency will\ngenerate slices.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "snapshot": { "description": "Configuration for monitoring snapshot tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_name": { "$ref": "#/$defs/string" @@ -1653,12 +1653,12 @@ "time_series": { "description": "Configuration for monitoring time series tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "warehouse_id": { "description": "Optional argument to specify the warehouse for dashboard creation. 
If not specified, the first running\nwarehouse will be used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1689,12 +1689,12 @@ "catalog_name": { "description": "The name of the catalog where the schema and the registered model reside", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "comment": { "description": "The comment attached to the registered model", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "created_at": { "$ref": "#/$defs/int64" @@ -1718,7 +1718,7 @@ "name": { "description": "The name of the registered model", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "owner": { "$ref": "#/$defs/string" @@ -1726,12 +1726,12 @@ "schema_name": { "description": "The name of the schema where the registered model resides", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "storage_location": { "description": "The storage location on the cloud under which model version data files are stored", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "updated_at": { "$ref": "#/$defs/int64" @@ -1757,12 +1757,12 @@ "catalog_name": { "description": "Name of parent catalog.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "comment": { "description": "User-provided free-form text description.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant" @@ -1774,7 +1774,7 @@ "name": { "description": "Name of schema, relative to parent catalog.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "properties": { "$ref": "#/$defs/map/string" @@ -1782,7 +1782,7 @@ "storage_root": { "description": "Storage root URL for managed tables within schema.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -1805,11 +1805,11 @@ "properties": { "principal": { "$ref": "#/$defs/string", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" }, "privileges": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrantPrivilege", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false, @@ -1858,27 +1858,27 @@ "backend_type": { "description": "The backend type the scope will be created with. If not specified, will default to `DATABRICKS`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "keyvault_metadata": { "description": "The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle", - "x-since-version": "v0.268.0" + "since_version": "v0.268.0" }, "name": { "description": "Scope name requested by the user. 
Scope names are unique.", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "permissions": { "description": "The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" } }, "additionalProperties": false, @@ -1900,22 +1900,22 @@ "group_name": { "description": "The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "level": { "description": "The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "service_principal_name": { "description": "The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "user_name": { "description": "The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL.", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" } }, "additionalProperties": false, @@ -1954,22 +1954,22 @@ "auto_stop_mins": { "description": "The amount of time in minutes that a SQL warehouse must be idle (i.e., no\nRUNNING queries) before it is automatically stopped.\n\nSupported values:\n- Must be == 0 or \u003e= 10 mins\n- 0 indicates no autostop.\n\nDefaults to 120 mins", "$ref": "#/$defs/int", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "channel": { "description": "Channel Details", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Channel", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "cluster_size": { "description": "Size of the clusters allocated for this warehouse.\nIncreasing the size of a spark cluster allows you to run larger queries on\nit. If you want to increase the number of concurrent queries, please tune\nmax_num_clusters.\n\nSupported values:\n- 2X-Small\n- X-Small\n- Small\n- Medium\n- Large\n- X-Large\n- 2X-Large\n- 3X-Large\n- 4X-Large", "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "creator_name": { "description": "warehouse creator name", "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "enable_photon": { "description": "Configures whether the warehouse should use Photon optimized clusters.\n\nDefaults to true.", @@ -1978,13 +1978,13 @@ "enable_serverless_compute": { "description": "Configures whether the warehouse should use serverless compute", "$ref": "#/$defs/bool", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "instance_profile_arn": { "description": "Deprecated. 
Instance profile used to pass IAM role to the cluster", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.260.0", + "since_version": "v0.260.0", "deprecated": true }, "lifecycle": { @@ -1994,17 +1994,17 @@ "max_num_clusters": { "description": "Maximum number of clusters that the autoscaler will create to handle\nconcurrent queries.\n\nSupported values:\n- Must be \u003e= min_num_clusters\n- Must be \u003c= 40.\n\nDefaults to min_clusters if unset.", "$ref": "#/$defs/int", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "min_num_clusters": { "description": "Minimum number of available clusters that will be maintained for this SQL\nwarehouse. Increasing this will ensure that a larger number of clusters are\nalways running and therefore may reduce the cold start time for new\nqueries. This is similar to reserved vs. revocable cores in a resource\nmanager.\n\nSupported values:\n- Must be \u003e 0\n- Must be \u003c= min(max_num_clusters, 30)\n\nDefaults to 1", "$ref": "#/$defs/int", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "name": { "description": "Logical name for the cluster.\n\nSupported values:\n- Must be unique within an org.\n- Must be less than 100 characters.", "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission" @@ -2015,7 +2015,7 @@ "tags": { "description": "A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated\nwith this SQL warehouse.\n\nSupported values:\n- Number of tags \u003c 45.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.EndpointTags", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "warehouse_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType" @@ -2036,19 +2036,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermissionLevel", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" } }, "additionalProperties": false, @@ -2087,24 +2087,24 @@ "properties": { "database_instance_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.266.0" + "since_version": "v0.266.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle", - "x-since-version": "v0.268.0" + "since_version": "v0.268.0" }, "logical_database_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.266.0" + "since_version": "v0.266.0" }, "name": { "$ref": "#/$defs/string", - "x-since-version": "v0.266.0" + "since_version": "v0.266.0" }, "spec": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec", - "x-since-version": "v0.266.0" + "since_version": "v0.266.0" } }, "additionalProperties": false, @@ -2126,12 +2126,12 @@ "catalog_name": { "description": "The name of the catalog where the schema and the volume are", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, "comment": { "description": "The comment attached to the volume", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrant" @@ -2143,17 +2143,17 @@ "name": { "description": "The name of the volume", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, "schema_name": { "description": "The name of the schema where the volume is", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, "storage_location": { "description": "The storage location on the cloud", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, "volume_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.VolumeType" @@ -2180,11 +2180,11 @@ "properties": { "principal": { "$ref": "#/$defs/string", - "x-since-version": "v0.264.1" + "since_version": "v0.264.1" }, "privileges": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrantPrivilege", - "x-since-version": "v0.264.1" + "since_version": "v0.264.1" } }, "additionalProperties": false, @@ -2225,62 +2225,62 @@ "alert": { "description": "The name of the alert for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cluster": { "description": "The name of the cluster for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cluster_policy": { "description": "The name of the cluster_policy for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "dashboard": { "description": "The name of the dashboard for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "instance_pool": { "description": "The name of the instance_pool for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "job": { "description": "The name of the job for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "metastore": { "description": "The name of the metastore for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notification_destination": { "description": "The name of the notification_destination for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" }, 
"pipeline": { "description": "The name of the pipeline for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "query": { "description": "The name of the query for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "service_principal": { "description": "The name of the service_principal for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "warehouse": { "description": "The name of the warehouse for which to retrieve an ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -2299,22 +2299,22 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "description": { "description": "The description of the variable.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -2329,23 +2329,23 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "description": { "description": "The description of the variable", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -2363,33 +2363,33 @@ "build": { "description": "An optional set of build commands to run locally before deployment.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "dynamic_version": { "description": "Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts).", "$ref": "#/$defs/bool", - "x-since-version": "v0.245.0" + "since_version": "v0.245.0" }, "executable": { "description": "The executable type. 
Valid values are `bash`, `sh`, and `cmd`.", "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "files": { "description": "The relative or absolute path to the built artifact files.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "path": { "description": "The local path of the directory for the artifact.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "type": { "description": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType", "markdownDescription": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -2408,7 +2408,7 @@ "source": { "description": "Required. The artifact source file.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -2434,40 +2434,40 @@ "description": "The ID of a cluster to use to run the bundle.", "$ref": "#/$defs/string", "markdownDescription": "The ID of a cluster to use to run the bundle. See [cluster_id](https://docs.databricks.com/dev-tools/bundles/settings.html#cluster_id).", - "x-since-version": "v0.229.0" + "since_version": "v0.229.0" }, "compute_id": { "description": "Deprecated. The ID of the compute to use to run the bundle.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "databricks_cli_version": { "description": "The Databricks CLI version to use for the bundle.", "$ref": "#/$defs/string", "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "deployment": { "description": "The definition of the bundle deployment", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment", "markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git": { "description": "The Git version control details that are associated with your bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "The name of the bundle.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "uuid": { "description": "Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. 
This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command).", "$ref": "#/$defs/string", - "x-since-version": "v0.236.0" + "since_version": "v0.236.0" } }, "additionalProperties": false, @@ -2492,12 +2492,12 @@ "fail_on_active_runs": { "description": "Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "lock": { "description": "The deployment lock attributes.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -2517,38 +2517,38 @@ "description": "The PyDABs configuration.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs", "deprecationMessage": "Deprecated: please use python instead", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "python": { "description": "Configures loading of Python code defined with 'databricks-bundles' package.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", - "x-since-version": "v0.238.0" + "since_version": "v0.238.0" }, "python_wheel_wrapper": { "description": "Whether to use a Python wheel wrapper.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "scripts": { "description": "The commands to run.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "skip_artifact_cleanup": { "description": "Determines whether to skip cleaning up the .internal folder", "$ref": "#/$defs/bool", - "x-since-version": "v0.254.0" + "since_version": "v0.254.0" }, "skip_name_prefix_for_schema": { "description": "Skip adding the prefix that is either set in `presets.name_prefix` or computed when `mode: development`\nis set, to the names of UC schemas defined in the bundle.", "$ref": "#/$defs/bool", - "x-since-version": "v0.255.0" + "since_version": "v0.255.0" }, "use_legacy_run_as": { "description": "Whether to use the legacy run_as behavior.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -2568,13 +2568,13 @@ "description": "The Git branch name.", "$ref": "#/$defs/string", "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "origin_url": { "description": "The origin URL of the repository.", "$ref": "#/$defs/string", "markdownDescription": "The origin URL of the repository. 
See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2593,12 +2593,12 @@
         "enabled": {
           "description": "Whether this lock is enabled.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "force": {
           "description": "Whether to force this lock if it is enabled.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2620,37 +2620,37 @@
         "artifacts_dynamic_version": {
           "description": "Whether to enable dynamic_version on all artifacts.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.256.0"
+          "since_version": "v0.256.0"
         },
         "jobs_max_concurrent_runs": {
           "description": "The maximum concurrent runs for a job.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "name_prefix": {
           "description": "The prefix for job runs of the bundle.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "pipelines_development": {
           "description": "Whether pipeline deployments should be locked in development mode.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "source_linked_deployment": {
           "description": "Whether to link the deployment to the bundle source.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.236.0"
+          "since_version": "v0.236.0"
         },
         "tags": {
           "description": "The tags for the bundle deployment.",
           "$ref": "#/$defs/map/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "trigger_pause_status": {
           "description": "A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2669,7 +2669,7 @@
         "enabled": {
           "description": "Whether or not PyDABs (Private Preview) is enabled",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2688,17 +2688,17 @@
         "mutators": {
           "description": "Mutators contains a list of fully qualified function paths to mutator functions.\n\nExample: [\"my_project.mutators:add_default_cluster\"]",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.238.0"
+          "since_version": "v0.238.0"
         },
         "resources": {
           "description": "Resources contains a list of fully qualified function paths to load resources\ndefined in Python code.\n\nExample: [\"my_project.resources:load_resources\"]",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.238.0"
+          "since_version": "v0.238.0"
         },
         "venv_path": {
           "description": "VEnvPath is path to the virtual environment.\n\nIf enabled, Python code will execute within this environment. If disabled,\nit defaults to using the Python interpreter available in the current shell.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.238.0"
+          "since_version": "v0.238.0"
         }
       },
       "additionalProperties": false
@@ -2716,103 +2716,103 @@
       "properties": {
         "alerts": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Alert",
-          "x-since-version": "v0.279.0"
+          "since_version": "v0.279.0"
         },
         "apps": {
           "description": "The app resource defines a Databricks app.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App",
           "markdownDescription": "The app resource defines a [Databricks app](https://docs.databricks.com/api/workspace/apps/create). For information about Databricks Apps, see [link](https://docs.databricks.com/dev-tools/databricks-apps/index.html).",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "clusters": {
           "description": "The cluster definitions for the bundle, where each key is the name of a cluster.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster",
           "markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters).",
-          "x-since-version": "v0.229.0"
+          "since_version": "v0.229.0"
         },
         "dashboards": {
           "description": "The dashboard definitions for the bundle, where each key is the name of the dashboard.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard",
           "markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards).",
-          "x-since-version": "v0.232.0"
+          "since_version": "v0.232.0"
         },
         "database_catalogs": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseCatalog",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         },
         "database_instances": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseInstance",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         },
         "experiments": {
           "description": "The experiment definitions for the bundle, where each key is the name of the experiment.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment",
           "markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "jobs": {
           "description": "The job definitions for the bundle, where each key is the name of the job.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job",
           "markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
        },
         "model_serving_endpoints": {
           "description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint",
           "markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "models": {
           "description": "The model definitions for the bundle, where each key is the name of the model.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel",
           "markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "pipelines": {
           "description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline",
           "markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "quality_monitors": {
           "description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor",
           "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "registered_models": {
           "description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
           "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "schemas": {
           "description": "The schema definitions for the bundle, where each key is the name of the schema.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema",
           "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "secret_scopes": {
           "description": "The secret scope definitions for the bundle, where each key is the name of the secret scope.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SecretScope",
           "markdownDescription": "The secret scope definitions for the bundle, where each key is the name of the secret scope. See [secret_scopes](https://docs.databricks.com/dev-tools/bundles/resources.html#secret_scopes).",
-          "x-since-version": "v0.252.0"
+          "since_version": "v0.252.0"
         },
         "sql_warehouses": {
           "description": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SqlWarehouse",
           "markdownDescription": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. See [sql_warehouses](https://docs.databricks.com/dev-tools/bundles/resources.html#sql_warehouses).",
-          "x-since-version": "v0.260.0"
+          "since_version": "v0.260.0"
         },
         "synced_database_tables": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "volumes": {
           "description": "The volume definitions for the bundle, where each key is the name of the volume.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume",
           "markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes).",
-          "x-since-version": "v0.236.0"
+          "since_version": "v0.236.0"
         }
       },
       "additionalProperties": false
@@ -2830,7 +2830,7 @@
       "properties": {
         "content": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.259.0"
+          "since_version": "v0.259.0"
         }
       },
       "additionalProperties": false,
@@ -2852,17 +2852,17 @@
         "exclude": {
           "description": "A list of files or folders to exclude from the bundle.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "include": {
           "description": "A list of files or folders to include in the bundle.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "paths": {
           "description": "The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2881,76 +2881,76 @@
         "artifacts": {
           "description": "The artifacts to include in the target deployment.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "bundle": {
           "description": "The bundle attributes when deploying to this target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "cluster_id": {
           "description": "The ID of the cluster to use for this target.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.229.0"
+          "since_version": "v0.229.0"
         },
         "compute_id": {
           "description": "Deprecated. The ID of the compute to use for this target.",
           "$ref": "#/$defs/string",
           "deprecationMessage": "Deprecated: please use cluster_id instead",
-          "x-since-version": "v0.228.1",
+          "since_version": "v0.228.1",
           "deprecated": true
         },
         "default": {
           "description": "Whether this target is the default target.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "git": {
           "description": "The Git version control settings for the target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "mode": {
           "description": "The deployment mode for the target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode",
           "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "permissions": {
           "description": "The permissions for deploying and running the bundle in the target.",
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "presets": {
           "description": "The deployment presets for the target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "resources": {
           "description": "The resource definitions for the target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "run_as": {
           "description": "The identity to use to run the bundle.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
           "markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "sync": {
           "description": "The local paths to sync to the target workspace when a bundle is run or deployed.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "variables": {
           "description": "The custom variable definitions for the target.",
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "workspace": {
           "description": "The Databricks workspace for the target.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -2969,82 +2969,82 @@
         "artifact_path": {
           "description": "The artifact path to use within the workspace for both deployments and workflow runs",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "auth_type": {
           "description": "The authentication type.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_client_id": {
           "description": "The Azure client ID",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_environment": {
           "description": "The Azure environment",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_login_app_id": {
           "description": "The Azure login app ID",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_tenant_id": {
           "description": "The Azure tenant ID",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_use_msi": {
           "description": "Whether to use MSI for Azure",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_workspace_resource_id": {
           "description": "The Azure workspace resource ID",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "client_id": {
           "description": "The client ID for the workspace",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "file_path": {
           "description": "The file path to use within the workspace for both deployments and workflow runs",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "google_service_account": {
           "description": "The Google service account name",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "host": {
           "description": "The Databricks workspace host URL",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "profile": {
           "description": "The Databricks workspace profile name",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "resource_path": {
           "description": "The workspace resource path",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.230.0"
+          "since_version": "v0.230.0"
         },
         "root_path": {
           "description": "The Databricks workspace root path",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "state_path": {
           "description": "The workspace state path",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -3071,15 +3071,15 @@
       "properties": {
         "deployment_id": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "mode": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "source_code_path": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false
@@ -3097,7 +3097,7 @@
       "properties": {
         "source_code_path": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false
@@ -3159,41 +3159,41 @@
       "properties": {
         "database": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase",
-          "x-since-version": "v0.260.0"
+          "since_version": "v0.260.0"
         },
         "description": {
           "description": "Description of the App Resource.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "genie_space": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "job": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "name": {
           "description": "Name of the App Resource.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "secret": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "serving_endpoint": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "sql_warehouse": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "uc_securable": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable",
-          "x-since-version": "v0.253.0"
+          "since_version": "v0.253.0"
         }
       },
       "additionalProperties": false,
@@ -3214,15 +3214,15 @@
       "properties": {
         "database_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.260.0"
+          "since_version": "v0.260.0"
         },
         "instance_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.260.0"
+          "since_version": "v0.260.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission",
-          "x-since-version": "v0.260.0"
+          "since_version": "v0.260.0"
         }
       },
       "additionalProperties": false,
@@ -3259,15 +3259,15 @@
       "properties": {
         "name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpaceGenieSpacePermission",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "space_id": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         }
       },
       "additionalProperties": false,
@@ -3307,11 +3307,11 @@
       "properties": {
         "id": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false,
@@ -3350,15 +3350,15 @@
       "properties": {
         "key": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "scope": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false,
@@ -3398,11 +3398,11 @@
       "properties": {
         "name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false,
@@ -3440,11 +3440,11 @@
       "properties": {
         "id": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         },
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission",
-          "x-since-version": "v0.239.0"
+          "since_version": "v0.239.0"
         }
       },
       "additionalProperties": false,
@@ -3482,15 +3482,15 @@
       "properties": {
         "permission": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurablePermission",
-          "x-since-version": "v0.253.0"
+          "since_version": "v0.253.0"
         },
         "securable_full_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.253.0"
+          "since_version": "v0.253.0"
         },
         "securable_type": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurableType",
-          "x-since-version": "v0.253.0"
+          "since_version": "v0.253.0"
         }
       },
       "additionalProperties": false,
@@ -3619,17 +3619,17 @@
         "pause_status": {
           "description": "Read only field that indicates whether a schedule is paused or not.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "quartz_cron_expression": {
           "description": "The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "timezone_id": {
           "description": "The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -3670,7 +3670,7 @@
         "enabled": {
           "description": "Whether to enable data classification.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -3689,7 +3689,7 @@
         "email_addresses": {
           "description": "The list of email addresses to send the notification to. A maximum of 5 email addresses is supported.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -3708,37 +3708,37 @@
         "granularities": {
           "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "label_col": {
           "description": "Column for the label.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "model_id_col": {
           "description": "Column for the model identifier.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "prediction_col": {
           "description": "Column for the prediction.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "prediction_proba_col": {
           "description": "Column for prediction probabilities",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "problem_type": {
           "description": "Problem type the model aims to solve.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "timestamp_col": {
           "description": "Column for the timestamp.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -3780,27 +3780,27 @@
         "definition": {
           "description": "Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "input_columns": {
           "description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "name": {
           "description": "Name of the metric in the output tables.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "output_data_type": {
           "description": "The output type of the custom metric.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "type": {
           "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -3843,14 +3843,14 @@
         "on_failure": {
           "description": "Destinations to send notifications on failure/timeout.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "on_new_classification_tag_detected": {
           "description": "Destinations to send notifications on new classification tag detected.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -3883,12 +3883,12 @@
         "granularities": {
           "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "timestamp_col": {
           "description": "Column for the timestamp.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -3911,28 +3911,28 @@
         "alias_name": {
           "description": "Name of the alias, e.g. 'champion' or 'latest_stable'",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "catalog_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "id": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "model_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "schema_name": {
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         },
         "version_num": {
           "description": "Integer version number of the model version to which this alias points.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         }
       },
       "additionalProperties": false
@@ -3967,7 +3967,7 @@
         "destination": {
           "description": "abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -3989,12 +3989,12 @@
         "max_workers": {
           "description": "The maximum number of workers to which the cluster can scale up when overloaded.\nNote that `max_workers` must be strictly greater than `min_workers`.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "min_workers": {
           "description": "The minimum number of workers to which the cluster can scale down when underutilized.\nIt is also the initial number of workers the cluster will have after creation.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4013,51 +4013,51 @@
       "properties": {
         "availability": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ebs_volume_count": {
           "description": "The number of volumes launched for each instance. Users can choose up to 10 volumes.\nThis feature is only enabled for supported node types. Legacy node types cannot specify\ncustom EBS volumes.\nFor node types with no instance store, at least one EBS volume needs to be specified;\notherwise, cluster creation will fail.\n\nThese EBS volumes will be mounted at `/ebs0`, `/ebs1`, and etc.\nInstance store volumes will be mounted at `/local_disk0`, `/local_disk1`, and etc.\n\nIf EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for\nscratch storage because heterogenously sized scratch devices can lead to inefficient disk\nutilization. If no EBS volumes are attached, Databricks will configure Spark to use instance\nstore volumes.\n\nPlease note that if EBS volumes are specified, then the Spark configuration `spark.local.dir`\nwill be overridden.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ebs_volume_iops": {
           "description": "If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ebs_volume_size": {
           "description": "The size of each EBS volume (in GiB) launched for each instance. For general purpose\nSSD, this value must be within the range 100 - 4096. For throughput optimized HDD,\nthis value must be within the range 500 - 4096.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ebs_volume_throughput": {
           "description": "If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ebs_volume_type": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "first_on_demand": {
           "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nIf this value is greater than 0, the cluster driver node in particular will be placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "instance_profile_arn": {
           "description": "Nodes for this cluster will only be placed on AWS instances with this instance profile. If\nommitted, nodes will be placed on instances without an IAM instance profile. The instance\nprofile must have previously been added to the Databricks environment by an account\nadministrator.\n\nThis feature may only be available to certain customer plans.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "spot_bid_price_percent": {
           "description": "The bid price for AWS spot instances, as a percentage of the corresponding instance type's\non-demand price.\nFor example, if this field is set to 50, and the cluster needs a new `r3.xlarge` spot\ninstance, then the bid price is half of the price of\non-demand `r3.xlarge` instances. Similarly, if this field is set to 200, the bid price is twice\nthe price of on-demand `r3.xlarge` instances. If not specified, the default value is 100.\nWhen spot instances are requested for this cluster, only spot instances whose bid price\npercentage matches this field will be considered.\nNote that, for safety, we enforce this field to be no more than 10000.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "zone_id": {
           "description": "Identifier for the availability zone/datacenter in which the cluster resides.\nThis string will be of a form like \"us-west-2a\". The provided availability\nzone must be in the same region as the Databricks deployment. For example, \"us-west-2a\"\nis not a valid zone id if the Databricks deployment resides in the \"us-east-1\" region.\nThis is an optional field at cluster creation, and if not specified, the zone \"auto\" will be used.\nIf the zone specified is \"auto\", will try to place cluster in a zone with high availability,\nand will retry placement in a different AZ if there is not enough capacity.\n\nThe list of available zones as well as the default value can be found by using the\n`List Zones` method.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4093,22 +4093,22 @@
       "properties": {
         "availability": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "first_on_demand": {
           "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "log_analytics_info": {
           "description": "Defines values necessary to configure and run Azure Log Analytics agent",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "spot_bid_max_price": {
           "description": "The max bid price to be used for Azure spot instances.\nThe Max price for the bid cannot be higher than the on-demand price of the instance.\nIf not specified, the default value is -1, which specifies that the instance cannot be evicted\non the basis of price, and only on the basis of availability. Further, the value should \u003e 0 or -1.",
           "$ref": "#/$defs/float64",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4144,12 +4144,12 @@
         "jobs": {
           "description": "With jobs set, the cluster can be used for jobs",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "notebooks": {
           "description": "With notebooks set, this cluster can be used for notebooks",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4169,17 +4169,17 @@
         "dbfs": {
           "description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "s3": {
           "description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "volumes": {
           "description": "destination needs to be provided, e.g.\n`{ \"volumes\": { \"destination\": \"/Volumes/catalog/schema/volume/cluster_log\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo",
-          "x-since-version": "v0.242.0"
+          "since_version": "v0.242.0"
         }
       },
       "additionalProperties": false
@@ -4199,157 +4199,157 @@
         "apply_policy_default_values": {
           "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "autoscale": {
           "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "autotermination_minutes": {
           "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "aws_attributes": {
           "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "azure_attributes": {
           "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "cluster_log_conf": {
           "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "cluster_name": {
           "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "custom_tags": {
           "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
           "$ref": "#/$defs/map/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "data_security_mode": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "docker_image": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "driver_instance_pool_id": {
           "description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "driver_node_type_id": {
           "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "enable_elastic_disk": {
           "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "enable_local_disk_encryption": {
           "description": "Whether to enable LUKS on cluster VMs' local disks",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "gcp_attributes": {
           "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "init_scripts": {
           "description": "The configuration for storing init scripts. Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "instance_pool_id": {
           "description": "The optional ID of the instance pool to which the cluster belongs.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "is_single_node": {
           "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.237.0"
+          "since_version": "v0.237.0"
         },
         "kind": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind",
-          "x-since-version": "v0.237.0"
+          "since_version": "v0.237.0"
         },
         "node_type_id": {
           "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "num_workers": {
           "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "policy_id": {
           "description": "The ID of the cluster policy used to create the cluster if applicable.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "remote_disk_throughput": {
           "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED disks.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.257.0"
+          "since_version": "v0.257.0"
         },
         "runtime_engine": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "single_user_name": {
           "description": "Single user name if data_security_mode is `SINGLE_USER`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "spark_conf": {
           "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.",
           "$ref": "#/$defs/map/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "spark_env_vars": {
           "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
           "$ref": "#/$defs/map/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "spark_version": {
           "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "ssh_public_keys": {
           "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "total_initial_remote_disk_size": {
           "description": "If set, what the total initial volume size (in GB) of the remote disks should be. Currently only supported for GCP HYPERDISK_BALANCED disks.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.257.0"
+          "since_version": "v0.257.0"
         },
         "use_ml_runtime": {
           "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.237.0"
+          "since_version": "v0.237.0"
         },
         "workload_type": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4393,7 +4393,7 @@
         "destination": {
           "description": "dbfs destination, e.g. `dbfs:/my/path`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4415,12 +4415,12 @@
         "password": {
           "description": "Password of the user",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "username": {
           "description": "Name of the user",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
      },
       "additionalProperties": false
@@ -4438,12 +4438,12 @@
       "properties": {
         "basic_auth": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "url": {
           "description": "URL of the docker image.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4480,22 +4480,22 @@
           "description": "Use `environment_version` instead.",
           "$ref": "#/$defs/string",
           "deprecationMessage": "This field is deprecated",
-          "x-since-version": "v0.228.1",
+          "since_version": "v0.228.1",
           "deprecated": true
         },
         "dependencies": {
           "description": "List of pip dependencies, as supported by the version of pip in this environment.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "environment_version": {
           "description": "Required. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.252.0"
+          "since_version": "v0.252.0"
         },
         "java_dependencies": {
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.271.0"
+          "since_version": "v0.271.0"
         }
       },
       "additionalProperties": false
@@ -4514,39 +4514,39 @@
       "properties": {
         "availability": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "boot_disk_size": {
           "description": "Boot disk size in GB",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "first_on_demand": {
           "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         },
         "google_service_account": {
           "description": "If provided, the cluster will impersonate the google service account when accessing\ngcloud services (like GCS). The google service account\nmust have previously been added to the Databricks environment by an account\nadministrator.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "local_ssd_count": {
           "description": "If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached.\nEach local SSD is 375GB in size.\nRefer to [GCP documentation](https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds)\nfor the supported number of local SSDs for each instance type.",
           "$ref": "#/$defs/int",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "use_preemptible_executors": {
           "description": "This field determines whether the spark executors will be scheduled to run on preemptible\nVMs (when set to true) versus standard compute engine VMs (when set to false; default).\nNote: Soon to be deprecated, use the 'availability' field instead.",
           "$ref": "#/$defs/bool",
           "deprecationMessage": "This field is deprecated",
-          "x-since-version": "v0.228.1",
+          "since_version": "v0.228.1",
           "deprecated": true
         },
         "zone_id": {
           "description": "Identifier for the availability zone in which the cluster resides.\nThis can be one of the following:\n- \"HA\" =\u003e High availability, spread nodes across availability zones for a Databricks deployment region [default].\n- \"AUTO\" =\u003e Databricks picks an availability zone to schedule the cluster on.\n- A GCP availability zone =\u003e Pick One of the available zones for (machine type + region) from\nhttps://cloud.google.com/compute/docs/regions-zones.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4583,7 +4583,7 @@
         "destination": {
           "description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4606,39 +4606,39 @@
         "abfss": {
           "description": "Contains the Azure Data Lake Storage destination path",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "dbfs": {
           "description": "destination needs to be provided. e.g.\n`{ \"dbfs\": { \"destination\" : \"dbfs:/home/cluster_log\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo",
           "deprecationMessage": "This field is deprecated",
-          "x-since-version": "v0.228.1",
+          "since_version": "v0.228.1",
           "deprecated": true
         },
         "file": {
           "description": "destination needs to be provided, e.g.\n`{ \"file\": { \"destination\": \"file:/my/local/file.sh\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "gcs": {
           "description": "destination needs to be provided, e.g.\n`{ \"gcs\": { \"destination\": \"gs://my-bucket/file.sh\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "s3": {
           "description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \\\"s3\\\": { \\\"destination\\\": \\\"s3://cluster_log_bucket/prefix\\\", \\\"region\\\": \\\"us-west-2\\\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "volumes": {
           "description": "destination needs to be provided. e.g.\n`{ \\\"volumes\\\" : { \\\"destination\\\" : \\\"/Volumes/my-init.sh\\\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "workspace": {
           "description": "destination needs to be provided, e.g.\n`{ \"workspace\": { \"destination\": \"/cluster-init-scripts/setup-datadog.sh\" } }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4671,39 +4671,39 @@
         "cran": {
           "description": "Specification of a CRAN library to be installed as part of the library",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "egg": {
           "description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.",
           "$ref": "#/$defs/string",
           "deprecationMessage": "This field is deprecated",
-          "x-since-version": "v0.228.1",
+          "since_version": "v0.228.1",
           "deprecated": true
         },
         "jar": {
           "description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "maven": {
           "description": "Specification of a maven library to be installed. For example:\n`{ \"coordinates\": \"org.jsoup:jsoup:1.7.2\" }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "pypi": {
           "description": "Specification of a PyPi library to be installed. For example:\n`{ \"package\": \"simplejson\" }`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "requirements": {
           "description": "URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported.\nFor example: `{ \"requirements\": \"/Workspace/path/to/requirements.txt\" }` or `{ \"requirements\" : \"/Volumes/path/to/requirements.txt\" }`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "whl": {
           "description": "URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"whl\": \"/Workspace/path/to/library.whl\" }`, `{ \"whl\" : \"/Volumes/path/to/library.whl\" }` or\n`{ \"whl\": \"s3://my-bucket/library.whl\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4722,7 +4722,7 @@
         "destination": {
           "description": "local file destination, e.g. `file:/my/local/file.sh`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4744,12 +4744,12 @@
         "log_analytics_primary_key": {
           "description": "The primary key for the Azure Log Analytics agent configuration",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "log_analytics_workspace_id": {
           "description": "The workspace ID for the Azure Log Analytics agent configuration",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false
@@ -4768,17 +4768,17 @@
         "coordinates": {
           "description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "exclusions": {
           "description": "List of dependences to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`.\n\nMaven dependency exclusions:\nhttps://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "repo": {
           "description": "Maven repo to install the Maven package from. If omitted, both Maven Central Repository\nand Spark Packages are searched.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4800,12 +4800,12 @@
         "package": {
           "description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "repo": {
           "description": "The repository where the package can be found. If not specified, the default pip index is\nused.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4827,12 +4827,12 @@
         "package": {
           "description": "The name of the CRAN package to install.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "repo": {
           "description": "The repository where the package can be found. If not specified, the default CRAN repo is used.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4871,37 +4871,37 @@
         "canned_acl": {
           "description": "(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`.\nIf `canned_cal` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on\nthe destination bucket and prefix. The full list of possible canned acl can be found at\nhttp://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl.\nPlease also note that by default only the object owner gets full controls. If you are using cross account\nrole for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to\nread the logs.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "destination": {
           "description": "S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "enable_encryption": {
           "description": "(Optional) Flag to enable server side encryption, `false` by default.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "encryption_type": {
           "description": "(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when\nencryption is enabled and the default type is `sse-s3`.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "endpoint": {
           "description": "S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set.\nIf both are set, endpoint will be used.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
        },
         "kms_key": {
           "description": "(Optional) Kms key which will be used if encryption is enabled and encryption type is set to `sse-kms`.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         },
         "region": {
           "description": "S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set,\nendpoint will be used.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4924,7 +4924,7 @@
         "destination": {
           "description": "UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4947,7 +4947,7 @@
         "clients": {
           "description": "defined what type of clients can use the cluster. E.g. Notebooks, Jobs",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -4970,7 +4970,7 @@
         "destination": {
           "description": "wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.228.1"
+          "since_version": "v0.228.1"
         }
       },
       "additionalProperties": false,
@@ -5007,12 +5007,12 @@
         "key": {
           "description": "The key of the custom tag.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
        },
         "value": {
           "description": "The value of the custom tag.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.273.0"
+          "since_version": "v0.273.0"
         }
       },
       "additionalProperties": false
@@ -5032,17 +5032,17 @@
         "branch_time": {
           "description": "Branch time of the ref database instance.\nFor a parent ref instance, this is the point in time on the parent instance from which the\ninstance was created.\nFor a child ref instance, this is the point in time on the instance from which the child\ninstance was created.\nInput: For specifying the point in time to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         },
         "lsn": {
           "description": "User-specified WAL LSN of the ref database instance.\n\nInput: For specifying the WAL LSN to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         },
         "name": {
           "description": "Name of the ref database instance.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.265.0"
+          "since_version": "v0.265.0"
         }
       },
       "additionalProperties": false
@@ -5093,17 +5093,17 @@
         "budget_policy_id": {
           "description": "Budget policy to set on the newly created pipeline.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.279.0"
+          "since_version": "v0.279.0"
         },
         "storage_catalog": {
           "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC catalog for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be a standard catalog where the user has permissions to create Delta tables.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "storage_schema": {
           "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC schema for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be in the standard catalog where the user has permissions to create Delta tables.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         }
       },
       "additionalProperties": false
@@ -5238,37 +5238,37 @@
         "create_database_objects_if_missing": {
           "description": "If true, the synced table's logical database and schema resources in PG\nwill be created if they do not already exist.",
           "$ref": "#/$defs/bool",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "existing_pipeline_id": {
           "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf existing_pipeline_id is defined, the synced table will be bin packed into the existing pipeline\nreferenced. This avoids creating a new pipeline and allows sharing existing compute.\nIn this case, the scheduling_policy of this synced table must match the scheduling policy of the existing pipeline.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "new_pipeline_spec": {
           "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf new_pipeline_spec is defined, a new pipeline is created for this synced table. The location pointed to is used\nto store intermediate files (checkpoints, event logs etc). The caller must have write permissions to create Delta\ntables in the specified catalog and schema. Again, note this requires write permissions, whereas the source table\nonly requires read permissions.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "primary_key_columns": {
           "description": "Primary Key columns to be used for data insert/update in the destination.",
           "$ref": "#/$defs/slice/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "scheduling_policy": {
           "description": "Scheduling policy of the underlying pipeline.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSchedulingPolicy",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "source_table_full_name": {
           "description": "Three-part (catalog, schema, table) name of the source Delta table.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "timeseries_key": {
           "description": "Time series key to deduplicate (tie-break) rows with the same primary key.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         }
       },
       "additionalProperties": false
@@ -5313,22 +5313,22 @@
         "continuous_update_status": {
           "description": "Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE\nor the SYNCED_UPDATING_PIPELINE_RESOURCES state.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableContinuousUpdateStatus",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "failed_status": {
           "description": "Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the\nSYNCED_PIPELINE_FAILED state.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableFailedStatus",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "provisioning_status": {
           "description": "Detailed status of a synced table. Shown if the synced table is in the\nPROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableProvisioningStatus",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         },
         "triggered_update_status": {
           "description": "Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE\nor the SYNCED_NO_PENDING_UPDATE state.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableTriggeredUpdateStatus",
-          "x-since-version": "v0.266.0"
+          "since_version": "v0.266.0"
         }
       },
       "additionalProperties": false
@@ -5376,22 +5376,22 @@
         "clean_room_name": {
           "description": "The clean room that the notebook belongs to.",
           "$ref": "#/$defs/string",
-          "x-since-version": "v0.237.0"
+          "since_version": "v0.237.0"
         },
         "etag": {
           "description": "Checksum to validate the freshness of the notebook resource (i.e.
the notebook being run is the latest version).\nIt can be fetched by calling the :method:cleanroomassets/get API.", "$ref": "#/$defs/string", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" }, "notebook_base_parameters": { "description": "Base parameters to be used for the clean room notebook job.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" }, "notebook_name": { "description": "Name of the notebook being run.", "$ref": "#/$defs/string", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" } }, "additionalProperties": false, @@ -5414,17 +5414,17 @@ "gpu_node_pool_id": { "description": "ID of the GPU pool to use.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "gpu_type": { "description": "GPU type.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "num_gpus": { "description": "Number of GPUs.", "$ref": "#/$defs/int", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" } }, "additionalProperties": false, @@ -5461,17 +5461,17 @@ "left": { "description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "op": { "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "right": { "description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5515,12 +5515,12 @@ "pause_status": { "description": "Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "task_retry_mode": { "description": "Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskRetryMode", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false @@ -5539,17 +5539,17 @@ "pause_status": { "description": "Indicate whether this schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details.
This field is required.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "timezone_id": { "description": "A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5572,16 +5572,16 @@ "properties": { "dashboard_id": { "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "subscription": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "warehouse_id": { "description": "Optional: The warehouse id to execute the dashboard with for the schedule.\nIf not specified, the default warehouse of the dashboard will be used.", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -5601,12 +5601,12 @@ "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt Cloud for this task", "$ref": "#/$defs/string", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "dbt_cloud_job_id": { "description": "ID of the dbt Cloud job to be triggered", "$ref": "#/$defs/int64", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" } }, "additionalProperties": false @@ -5625,12 +5625,12 @@ "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt platform for this task", "$ref": "#/$defs/string", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" }, "dbt_platform_job_id": { "description": "ID of the dbt platform job to be triggered. Specified as a string for maximum compatibility with clients.", "$ref": "#/$defs/string", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" } }, "additionalProperties": false @@ -5649,37 +5649,37 @@ "catalog": { "description": "Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks \u003e= 1.1.1.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "commands": { "description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of 10 commands can be provided.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "profiles_directory": { "description": "Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "project_directory": { "description": "Path to the project directory. Optional for Git sourced tasks, in which\ncase if no value is provided, the root of the Git repository is used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "schema": { "description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided.
If not provided, the `default` schema is used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source": { "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in Databricks workspace.\n* `GIT`: Project is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5701,17 +5701,17 @@ "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "url": { "description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.\nThis makes it possible to wait for a batch of incoming files to arrive before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5733,17 +5733,17 @@ "concurrency": { "description": "An optional maximum allowed number of concurrent runs of the task.\nSet this value if you want to be able to execute multiple runs of the task concurrently.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "inputs": { "description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "task": { "description": "Configuration for the task that will be run for each element in the array", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5781,41 +5781,41 @@ "command": { "description": "Command launcher to run the actual script, e.g. 
bash, python etc.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "compute": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "dl_runtime_image": { "description": "Runtime image", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "mlflow_experiment_name": { "description": "Optional string containing the name of the MLflow experiment to log the run to. If name is not\nfound, backend will create the mlflow experiment using the name.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "source": { "description": "Optional location type of the training script. When set to `WORKSPACE`, the script will be retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Script is located in Databricks workspace.\n* `GIT`: Script is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "training_script_path": { "description": "The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "yaml_parameters": { "description": "Optional string containing model parameters passed to the training script in yaml format.\nIf present, then the content in yaml_parameters_file_path will be ignored.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "yaml_parameters_file_path": { "description": "Optional path to a YAML file containing model parameters passed to the training script.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" } }, "additionalProperties": false, @@ -5859,7 +5859,7 @@ "used_commit": { "description": "Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -5879,27 +5879,27 @@ "git_branch": { "description": "Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git_commit": { "description": "Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git_provider": { "description": "Unique identifier of the service used to host the Git repository. 
The value is case insensitive.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git_tag": { "description": "Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "git_url": { "description": "URL of the repository to be cloned by this job.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5922,12 +5922,12 @@ "job_cluster_key": { "description": "A unique name for the job cluster. This field is required and must be unique within the job.\n`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "new_cluster": { "description": "If new_cluster, a description of a cluster that is created for each task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -5950,12 +5950,12 @@ "kind": { "description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "metadata_file_path": { "description": "Path of the file that contains deployment metadata.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6009,33 +6009,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -6054,11 +6054,11 @@ "environment_key": { "description": "The key of an environment. It has to be unique within a job.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spec": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6080,12 +6080,12 @@ "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -6104,12 +6104,12 @@ "default": { "description": "Default value of the parameter.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6156,17 +6156,17 @@ "dirty_state": { "description": "Dirty state indicates the job is not fully synced with the job specification in the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit.
Import the remote job specification again from UI to make the job fully synced.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "import_from_git_branch": { "description": "Name of the branch which the job is imported from.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "job_config_path": { "description": "Path of the job YAML file that contains the job specification.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6238,16 +6238,16 @@ "properties": { "metric": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "op": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "value": { "description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.", "$ref": "#/$defs/int64", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6271,7 +6271,7 @@ "properties": { "rules": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -6290,27 +6290,27 @@ "aliases": { "description": "Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "condition": { "description": "The condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "securable_name": { "description": "Name of the securable to monitor (\"mycatalog.myschema.mymodel\" in the case of model-level triggers,\n\"mycatalog.myschema\" in the case of schema-level triggers) or empty in the case of metastore-level triggers.", "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no model updates have occurred for the specified time\nand can be used to wait for a series of model updates before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -6348,22 +6348,22 @@ "base_parameters": { "description": "Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run\nNow with parameters specified, the two parameters maps are merged. 
If the same key is specified in\n`base_parameters` and in `run-now`, the value from `run-now` is used.\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nIf the notebook takes a parameter that is not specified in the job’s `base_parameters` or the `run-now` override parameters,\nthe default value from the notebook is used.\n\nRetrieve these parameters in a notebook using [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets).\n\nThe JSON representation of this field cannot exceed 1MB.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notebook_path": { "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source": { "description": "Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Notebook is located in Databricks workspace.\n* `GIT`: Notebook is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "warehouse_id": { "description": "Optional `warehouse_id` to run the notebook on a SQL warehouse. 
Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.\n\nNote that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6416,12 +6416,12 @@ "interval": { "description": "The interval at which the trigger should run.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "unit": { "description": "The unit of time for the interval.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6460,7 +6460,7 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -6479,12 +6479,12 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "pipeline_id": { "description": "The full name of the pipeline task to execute.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6506,27 +6506,27 @@ "authentication_method": { "description": "How the published Power BI model authenticates to Databricks", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AuthenticationMethod", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "model_name": { "description": "The name of the Power BI model", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "overwrite_existing": { "description": "Whether to overwrite existing Power BI models", "$ref": "#/$defs/bool", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "storage_mode": { "description": "The default storage mode of the Power BI model", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "workspace_name": { "description": "The name of the Power BI workspace of the model", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -6545,22 +6545,22 @@ "catalog": { "description": "The catalog name in Databricks", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "name": { "description": "The table name in Databricks", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "schema": { "description": "The schema name in Databricks", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "storage_mode": { "description": "The Power BI storage mode of the table", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -6579,27 +6579,27 @@ "connection_resource_name": { "description": "The resource name of the UC connection to authenticate from Databricks to Power BI", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "power_bi_model": { "description": "The semantic model to update", "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "refresh_after_update": { "description": "Whether the model should be refreshed after the update", "$ref": "#/$defs/bool", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "tables": { "description": "The tables to be exported to Power BI", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "warehouse_id": { "description": "The SQL warehouse ID to use as the Power BI data source", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -6618,22 +6618,22 @@ "entry_point": { "description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "named_parameters": { "description": "Command-line parameters passed to Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "package_name": { "description": "Name of the package to execute", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "parameters": { "description": "Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6656,7 +6656,7 @@ "enabled": { "description": "If true, enable queueing for the job. 
This is a required field.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6701,7 +6701,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "jar_params": { @@ -6710,18 +6710,18 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "job_id": { "description": "ID of the job to trigger.", "$ref": "#/$defs/int64", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "job_parameters": { "description": "Job-level parameters used to trigger the job.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notebook_params": { "description": "A map from keys to values for jobs with notebook task, for example `\"notebook_params\": {\"name\": \"john doe\", \"age\": \"35\"}`.\nThe map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function.\n\nIf not specified upon `run-now`, the triggered run uses the job’s base parameters.\n\nnotebook_params cannot be specified in conjunction with jar_params.\n\n⚠ **Deprecation note** Use [job parameters](https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown) to pass information down to tasks.\n\nThe JSON representation of this field (for example `{\"notebook_params\":{\"name\":\"john doe\",\"age\":\"35\"}}`) cannot exceed 10,000 bytes.", @@ -6729,20 +6729,20 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "pipeline_params": { "description": "Controls whether the pipeline should perform a full refresh", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "python_named_params": { "$ref": "#/$defs/map/string", "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "python_params": { @@ -6751,7 +6751,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "spark_submit_params": { @@ -6760,7 +6760,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "sql_params": { @@ -6769,7 +6769,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true } }, @@ -6809,24 +6809,24 @@ "description": "Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field instead. 
For serverless compute, provide a `jar` through the `java_dependencies` field inside the `environments` list.\n\nSee the examples of classic and serverless compute usage at the top of the page.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "main_class_name": { "description": "The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library.\n\nThe code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "parameters": { "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "run_as_repl": { "description": "Deprecated. A value of `false` is no longer supported.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.240.0", + "since_version": "v0.240.0", "deprecated": true } }, @@ -6846,17 +6846,17 @@ "parameters": { "description": "Command line parameters passed to the Python file.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "python_file": { "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source": { "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local\nDatabricks workspace or cloud location (if the `python_file` has a URI format).
When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6878,7 +6878,7 @@ "parameters": { "description": "Command-line parameters passed to spark submit.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -6897,32 +6897,32 @@ "alert": { "description": "If alert, indicates that this job must refresh a SQL alert.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "dashboard": { "description": "If dashboard, indicates that this job must refresh a SQL dashboard.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "file": { "description": "If file, indicates that this job runs a SQL file in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "parameters": { "description": "Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "query": { "description": "If query, indicates that this job must execute a SQL query.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "warehouse_id": { "description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. 
Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6944,17 +6944,17 @@ "alert_id": { "description": "The canonical identifier of the SQL alert.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "pause_subscriptions": { "description": "If true, the alert notifications are not sent to subscribers.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "subscriptions": { "description": "If specified, alert notifications are sent to subscribers.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -6976,22 +6976,22 @@ "custom_subject": { "description": "Subject of the email sent to subscribers of this task.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "dashboard_id": { "description": "The canonical identifier of the SQL dashboard.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "pause_subscriptions": { "description": "If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "subscriptions": { "description": "If specified, dashboard snapshots are sent to subscriptions.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7013,12 +7013,12 @@ "path": { "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source": { "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7040,7 +7040,7 @@ "query_id": { "description": "The canonical identifier of the SQL query.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7062,12 +7062,12 @@ "destination_id": { "description": "The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "user_name": { "description": "The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. 
You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7102,16 +7102,16 @@ "custom_subject": { "description": "Optional: Allows users to specify a custom subject line on the email sent\nto subscribers.", "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "paused": { "description": "When true, the subscription will not send emails.", "$ref": "#/$defs/bool", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "subscribers": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -7129,11 +7129,11 @@ "properties": { "destination_id": { "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "user_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" } }, "additionalProperties": false @@ -7152,22 +7152,22 @@ "condition": { "description": "The table(s) condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_names": { "description": "A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no table updates have occurred for the specified time\nand can be used to wait for a series of table updates before triggering a run. 
The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7189,17 +7189,17 @@ "clean_rooms_notebook_task": { "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask", - "x-since-version": "v0.237.0" + "since_version": "v0.237.0" }, "condition_task": { "description": "The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present.\nThe condition task does not require a cluster to execute and does not support retries or notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "dashboard_task": { "description": "The task refreshes a dashboard and sends a snapshot to subscribers.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "dbt_cloud_task": { "description": "Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task", @@ -7207,173 +7207,173 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "x-since-version": "v0.256.0", + "since_version": "v0.256.0", "deprecated": true }, "dbt_platform_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" }, "dbt_task": { "description": "The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "depends_on": { "description": "An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true.\nThe key is `task_key`, and the value is the name assigned to the dependent task.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "description": { "description": "An optional description for this task.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "disable_auto_optimization": { "description": "An option to disable auto optimization in serverless", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "disabled": { "description": "An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. 
The default behavior is to not send any emails.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "environment_key": { "description": "The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "existing_cluster_id": { "description": "If existing_cluster_id, the ID of an existing cluster that is used for all runs.\nWhen running jobs or tasks on an existing cluster, you may need to manually restart\nthe cluster if it stops responding. We suggest running jobs and tasks on new clusters for\ngreater reliability", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "for_each_task": { "description": "The task executes a nested task for every input provided when the `for_each_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "gen_ai_compute_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "job_cluster_key": { "description": "If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "libraries": { "description": "An optional list of libraries to be installed on the cluster.\nThe default value is an empty list.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "max_retries": { "description": "An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "min_retry_interval_millis": { "description": "An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. 
The default behavior is that unsuccessful runs are immediately retried.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "new_cluster": { "description": "If new_cluster, a description of a new cluster that is created for each run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notebook_task": { "description": "The task runs a notebook when the `notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "pipeline_task": { "description": "The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "power_bi_task": { "description": "The task triggers a Power BI semantic model update when the `power_bi_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask", - "x-since-version": "v0.248.0" + "since_version": "v0.248.0" }, "python_wheel_task": { "description": "The task runs a Python wheel when the `python_wheel_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "retry_on_timeout": { "description": "An optional policy to specify whether to retry a job when it times out.
The default behavior\nis to not retry on timeout.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "run_if": { "description": "An optional value specifying the condition determining whether the task is run once its dependencies have been completed.\n\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: All dependencies have failed", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "run_job_task": { "description": "The task triggers another job when the `run_job_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spark_jar_task": { "description": "The task runs a JAR when the `spark_jar_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spark_python_task": { "description": "The task runs a Python file when the `spark_python_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spark_submit_task": { "description": "(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "sql_task": { "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "task_key": { "description": "A unique name for the task. This field is used to refer to this task from other tasks.\nThis field is required and must be unique within its parent job.\nOn Update or Reset, this field is used to reference the tasks to be updated or reset.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job task. A value of `0` means no timeout.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this task begin or complete. The default behavior is to not send any system notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7395,12 +7395,12 @@ "outcome": { "description": "Can only be specified on condition task dependencies. 
The outcome of the dependent task that must be met for this task to run.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "task_key": { "description": "The name of the task this task depends on.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7423,33 +7423,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7468,17 +7468,17 @@ "alert_on_last_attempt": { "description": "If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7513,27 +7513,27 @@ "file_arrival": { "description": "File arrival trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "model": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "pause_status": { "description": "Whether this trigger is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "periodic": { "description": "Periodic trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_update": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7551,7 +7551,7 @@ "properties": { "id": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7573,27 +7573,27 @@ "on_duration_warning_threshold_exceeded": { "description": "An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_failure": { "description": "An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_start": { "description": "An optional list of system notification IDs to call when the run starts. 
A maximum of 3 destinations can be specified for the `on_start` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_streaming_backlog_exceeded": { "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7613,12 +7613,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7638,12 +7638,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7664,7 +7664,7 @@ "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -7682,11 +7682,11 @@ "properties": { "quartz_cron_schedule": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "timezone_id": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7742,17 +7742,17 @@ "catalog": { "description": "The UC catalog the event log is published under.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "name": { "description": "The name the event log is published to in UC.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "schema": { "description": "The UC schema the event log is published under.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" } }, "additionalProperties": false @@ -7771,7 +7771,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7790,12 +7790,12 @@ "exclude": { "description": "Paths to exclude.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "include": { "description": "Paths to include.", "$ref": "#/$defs/slice/string", - "x-since-version": 
"v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7814,17 +7814,17 @@ "report": { "description": "Select a specific source report.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "schema": { "description": "Select all tables from a specific source schema.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table": { "description": "Select a specific source table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7844,35 +7844,35 @@ "description": "[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true }, "connection_name": { "description": "Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", - "x-since-version": "v0.234.0" + "since_version": "v0.234.0" }, "connection_parameters": { "description": "Optional, Internal. Parameters required to establish an initial connection with the source.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "gateway_storage_catalog": { "description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "gateway_storage_name": { "description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nSpark Declarative Pipelines system will automatically create the storage location under the catalog and schema.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "gateway_storage_schema": { "description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -7896,40 +7896,40 @@ "connection_name": { "description": "Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "ingest_from_uc_foreign_catalog": { "description": "Immutable. If set to true, the pipeline will ingest tables from the\nUC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.\nThe `source_catalog` fields in objects of IngestionConfig are interpreted as\nthe UC foreign catalogs to ingest from.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "ingestion_gateway_id": { "description": "Immutable. 
Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "netsuite_jar_path": { "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" }, "objects": { "description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_configurations": { "description": "Top-level source configurations", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -7951,21 +7951,21 @@ "$ref": "#/$defs/slice/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "deletion_condition": { "description": "Specifies a SQL WHERE condition that specifies that the source row has been deleted.\nThis is sometimes referred to as \"soft-deletes\".\nFor example: \"Operation = 'DELETE'\" or \"is_deleted = true\".\nThis field is orthogonal to `hard_deletion_sync_interval_in_seconds`,\none for soft-deletes and the other for hard-deletes.\nSee also the hard_deletion_sync_min_interval_in_seconds field for\nhandling of \"hard deletes\" where the source rows are physically removed from the table.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "hard_deletion_sync_min_interval_in_seconds": { "description": "Specifies the minimum interval (in seconds) between snapshots on primary keys\nfor detecting and synchronizing hard deletions—i.e., rows that have been\nphysically removed from the source table.\nThis interval acts as a lower bound. If ingestion runs less frequently than\nthis value, hard deletion synchronization will align with the actual ingestion\nfrequency instead of happening more often.\nIf not set, hard deletion synchronization via snapshots is disabled.\nThis field is mutable and can be updated without triggering a full snapshot.", "$ref": "#/$defs/int64", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" } }, "additionalProperties": false @@ -7985,19 +7985,19 @@ "description": "(Optional) Marks the report as incremental.\nThis field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now\ncontrolled by the `parameters` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.271.0", + "since_version": "v0.271.0", "deprecated": true }, "parameters": { "description": "Parameters for the Workday report. 
Each key represents the parameter name (e.g., \"start_date\", \"end_date\"),\nand the corresponding value is a SQL-like expression used to compute the parameter value at runtime.\nExample:\n{\n\"start_date\": \"{ coalesce(current_offset(), date(\\\"2025-02-01\\\")) }\",\n\"end_date\": \"{ current_date() - INTERVAL 1 DAY }\"\n}", "$ref": "#/$defs/map/string", - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" }, "report_parameters": { "description": "(Optional) Additional custom parameters for Workday Report.\nThis field is deprecated and should not be used. Use `parameters` instead.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.271.0", + "since_version": "v0.271.0", "deprecated": true } }, @@ -8017,12 +8017,12 @@ "key": { "description": "Key for the report parameter; it can be a column name or other metadata", "$ref": "#/$defs/string", - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" }, "value": { "description": "Value for the report parameter.\nPossible values it can take are these sql functions:\n1. coalesce(current_offset(), date(\"YYYY-MM-DD\")) -\u003e if current_offset() is null, then the passed date, else current_offset()\n2. current_date()\n3. date_sub(current_date(), x) -\u003e subtract x (some non-negative integer) days from current date", "$ref": "#/$defs/string", - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" } }, "additionalProperties": false @@ -8081,7 +8081,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -8100,12 +8100,12 @@ "alerts": { "description": "A list of alerts that trigger the sending of notifications to the configured\ndestinations. The supported alerts are:\n\n* `on-update-success`: A pipeline update completes successfully.\n* `on-update-failure`: Each time a pipeline update fails.\n* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.\n* `on-flow-failure`: A single data flow fails.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "email_recipients": { "description": "A list of email addresses notified when a configured alert is triggered.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -8124,7 +8124,7 @@ "include": { "description": "The source code to include for pipelines", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" } }, "additionalProperties": false @@ -8143,97 +8143,97 @@ "apply_policy_default_values": { "description": "Note: This field won't be persisted. 
Only API users will check this field.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nOnly dbfs destinations are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "enable_local_disk_encryption": { "description": "Whether to enable local disk encryption for the cluster.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "init_scripts": { "description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. 
If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "label": { "description": "A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nSee :method:clusters/create for more details.", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. 
The\ncorresponding private keys can be used to log in with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -8252,17 +8252,17 @@ "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "min_workers": { "description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "mode": { "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -8301,12 +8301,12 @@ "kind": { "description": "The deployment method that manages the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "metadata_file_path": { "description": "The path to the file containing metadata about the deployment.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -8328,37 +8328,37 @@ "file": { "description": "The path to a file that defines a pipeline and is stored in the Databricks Repos.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "glob": { "description": "The unified field to include source codes.\nEach entry can be a notebook path, a file path, or a folder path that ends `/**`.\nThis field cannot be used together with `notebook` or `file`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "jar": { "description": "URI of the jar to be installed. 
Currently only DBFS is supported.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "maven": { "description": "Specification of a maven library to be installed.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "notebook": { "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "whl": { "description": "URI of the whl to be installed.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.228.1", + "since_version": "v0.228.1", "deprecated": true } }, @@ -8377,11 +8377,11 @@ "properties": { "cron": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "manual": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -8401,7 +8401,7 @@ "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.257.0" + "since_version": "v0.257.0" } }, "additionalProperties": false @@ -8421,7 +8421,7 @@ "slot_config": { "description": "Optional. The Postgres slot configuration to use for logical replication", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false @@ -8441,12 +8441,12 @@ "publication_name": { "description": "The name of the publication to use for the Postgres source", "$ref": "#/$defs/string", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" }, "slot_name": { "description": "The name of the logical replication slot to use for the Postgres source", "$ref": "#/$defs/string", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false @@ -8465,27 +8465,27 @@ "destination_catalog": { "description": "Required. Destination catalog to store table.", "$ref": "#/$defs/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "destination_schema": { "description": "Required. Destination schema to store table.", "$ref": "#/$defs/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "destination_table": { "description": "Required. Destination table name. The pipeline fails if a table with that name already exists.", "$ref": "#/$defs/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "source_url": { "description": "Required. 
Report URL in the source system.", "$ref": "#/$defs/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" } }, "additionalProperties": false, @@ -8509,17 +8509,17 @@ "days_of_week": { "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified, all days of the week will be used.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek", - "x-since-version": "v0.234.0" + "since_version": "v0.234.0" }, "start_hour": { "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", "$ref": "#/$defs/int", - "x-since-version": "v0.234.0" + "since_version": "v0.234.0" }, "time_zone_id": { "description": "Time zone id of restart window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.\nIf not specified, UTC will be used.", "$ref": "#/$defs/string", - "x-since-version": "v0.234.0" + "since_version": "v0.234.0" } }, "additionalProperties": false, @@ -8542,12 +8542,12 @@ "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", "$ref": "#/$defs/string", - "x-since-version": "v0.241.0" + "since_version": "v0.241.0" }, "user_name": { "description": "The email of an active workspace user. Users can only set this field to their own email.", "$ref": "#/$defs/string", - "x-since-version": "v0.241.0" + "since_version": "v0.241.0" } }, "additionalProperties": false @@ -8566,27 +8566,27 @@ "destination_catalog": { "description": "Required. Destination catalog to store tables.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "destination_schema": { "description": "Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails if a table with the same name already exists.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_catalog": { "description": "The source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_schema": { "description": "Required. Schema name in the source database.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. 
These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -8611,12 +8611,12 @@ "postgres": { "description": "Postgres-specific catalog-level configuration parameters", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" }, "source_catalog": { "description": "Source catalog name", "$ref": "#/$defs/string", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false @@ -8635,7 +8635,7 @@ "catalog": { "description": "Catalog-level source configuration parameters", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig", - "x-since-version": "v0.267.0" + "since_version": "v0.267.0" } }, "additionalProperties": false @@ -8654,37 +8654,37 @@ "destination_catalog": { "description": "Required. Destination catalog to store table.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "destination_schema": { "description": "Required. Destination schema to store table.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "destination_table": { "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_catalog": { "description": "Source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_schema": { "description": "Schema name in the source database. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "source_table": { "description": "Required. Table name in the source database.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -8708,49 +8708,49 @@ "exclude_columns": { "description": "A list of column names to be excluded for the ingestion.\nWhen not specified, include_columns fully controls which columns are ingested.\nWhen specified, all other columns including future ones will be automatically included for ingestion.\nThis field is mutually exclusive with `include_columns`.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.251.0" + "since_version": "v0.251.0" }, "include_columns": { "description": "A list of column names to be included for the ingestion.\nWhen not specified, all columns except ones in exclude_columns will be included. 
Future\ncolumns will be automatically included.\nWhen specified, all other future columns will be automatically excluded from ingestion.\nThis field is mutually exclusive with `exclude_columns`.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.251.0" + "since_version": "v0.251.0" }, "primary_keys": { "description": "The primary key of the table used to apply changes.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "query_based_connector_config": { "description": "Configurations that are only applicable for query-based ingestion connectors.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "salesforce_include_formula_fields": { "description": "If true, formula fields defined in the table are included in the ingestion. This setting is only valid for the Salesforce connector.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "scd_type": { "description": "The SCD type to use to ingest the table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "sequence_by": { "description": "The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.231.0" + "since_version": "v0.231.0" }, "workday_report_parameters": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "x-since-version": "v0.271.0" + "since_version": "v0.271.0" } }, "additionalProperties": false @@ -8786,12 +8786,12 @@ "ai21labs_api_key": { "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "ai21labs_api_key_plaintext": { "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. 
You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -8810,27 +8810,27 @@ "fallback_config": { "description": "Configuration for traffic fallback which automatically falls back to other served entities if the request to a served\nentity fails with certain error codes, to increase availability.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "guardrails": { "description": "Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "inference_table_config": { "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "rate_limits": { "description": "Configuration for rate limits which can be set to limit endpoint traffic.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "usage_tracking_config": { "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" } }, "additionalProperties": false @@ -8850,24 +8850,24 @@ "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.230.0", + "since_version": "v0.230.0", "deprecated": true }, "pii": { "description": "Configuration for guardrail PII filter.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "safety": { "description": "Indicates whether the safety filter is enabled.", "$ref": "#/$defs/bool", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "valid_topics": { "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", - "x-since-version": "v0.230.0", + "since_version": "v0.230.0", "deprecated": true } }, @@ -8887,7 +8887,7 @@ "behavior": { "description": "Configuration for input guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" } }, "additionalProperties": false @@ -8922,12 +8922,12 @@ "input": { "description": "Configuration for input guardrail filters.", "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "output": { "description": "Configuration for output guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" } }, "additionalProperties": false @@ -8946,22 +8946,22 @@ "catalog_name": { "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.", "$ref": "#/$defs/string", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "enabled": { "description": "Indicates whether the inference table is enabled.", "$ref": "#/$defs/bool", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "schema_name": { "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.", "$ref": "#/$defs/string", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.", "$ref": "#/$defs/string", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" } }, "additionalProperties": false @@ -8980,27 +8980,27 @@ "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "key": { "description": "Key field for a rate limit. Currently, 'user', 'user_group', 'service_principal', and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "principal": { "description": "Principal field for a user, user group, or service principal to apply rate limiting to. Accepts a user email, group name, or service principal application ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "renewal_period": { "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" }, "tokens": { "description": "Used to specify how many tokens are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "x-since-version": "v0.265.0" + "since_version": "v0.265.0" } }, "additionalProperties": false, @@ -9053,7 +9053,7 @@ "enabled": { "description": "Whether to enable usage tracking.", "$ref": "#/$defs/bool", - "x-since-version": "v0.230.0" + "since_version": "v0.230.0" } }, "additionalProperties": false @@ -9072,37 +9072,37 @@ "aws_access_key_id": { "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. 
You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "aws_access_key_id_plaintext": { "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "aws_region": { "description": "The AWS region to use. Bedrock has to be enabled there.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "aws_secret_access_key": { "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "aws_secret_access_key_plaintext": { "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "bedrock_provider": { "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the external model will use to access AWS resources.\nYou must authenticate using an instance profile or access keys.\nIf you prefer to authenticate using access keys, see `aws_access_key_id`,\n`aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.243.0" + "since_version": "v0.243.0" } }, "additionalProperties": false, @@ -9142,12 +9142,12 @@ "anthropic_api_key": { "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "anthropic_api_key_plaintext": { "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. 
You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9166,17 +9166,17 @@ "key": { "description": "The name of the API key parameter used for authentication.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "value": { "description": "The Databricks secret key reference for an API Key.\nIf you prefer to paste your token directly, see `value_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "value_plaintext": { "description": "The API Key provided as a plaintext string. If you prefer to reference your\ntoken using Databricks Secrets, see `value`.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" } }, "additionalProperties": false, @@ -9198,22 +9198,22 @@ "catalog_name": { "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "enabled": { "description": "Indicates whether the inference table is enabled.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "schema_name": { "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9232,12 +9232,12 @@ "token": { "description": "The Databricks secret key reference for a token.\nIf you prefer to paste your token directly, see `token_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "token_plaintext": { "description": "The token provided as a plaintext string. If you prefer to reference your\ntoken using Databricks Secrets, see `token`.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" } }, "additionalProperties": false @@ -9256,17 +9256,17 @@ "cohere_api_base": { "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cohere_api_key": { "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cohere_api_key_plaintext": { "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. 
You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9286,17 +9286,17 @@ "api_key_auth": { "description": "This is a field to provide API key authentication for the custom provider API.\nYou can only specify one authentication method.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "bearer_token_auth": { "description": "This is a field to provide bearer token authentication for the custom provider API.\nYou can only specify one authentication method.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "custom_provider_url": { "description": "This is a field to provide the URL of the custom provider API.", "$ref": "#/$defs/string", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" } }, "additionalProperties": false, @@ -9318,17 +9318,17 @@ "databricks_api_token": { "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "databricks_api_token_plaintext": { "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. 
You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "databricks_workspace_url": { "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9350,12 +9350,12 @@ "on_update_failure": { "description": "A list of email addresses to be notified when an endpoint fails to update its configuration or state.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" }, "on_update_success": { "description": "A list of email addresses to be notified when an endpoint successfully updates its configuration or state.", "$ref": "#/$defs/slice/string", - "x-since-version": "v0.264.0" + "since_version": "v0.264.0" } }, "additionalProperties": false @@ -9374,22 +9374,22 @@ "auto_capture_config": { "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "served_entities": { "description": "The list of served entities under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "served_models": { "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "traffic_config": { "description": "The traffic configuration associated with the serving endpoint config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9408,12 +9408,12 @@ "key": { "description": "Key field for a serving endpoint tag.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "value": { "description": "Optional value field for a serving endpoint tag.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9435,62 +9435,62 @@ "ai21labs_config": { "description": "AI21Labs Config. Only required if the provider is 'ai21labs'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "amazon_bedrock_config": { "description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "anthropic_config": { "description": "Anthropic Config. 
Only required if the provider is 'anthropic'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "cohere_config": { "description": "Cohere Config. Only required if the provider is 'cohere'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "custom_provider_config": { "description": "Custom Provider Config. Only required if the provider is 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" }, "databricks_model_serving_config": { "description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "google_cloud_vertex_ai_config": { "description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "The name of the external model.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_config": { "description": "OpenAI Config. Only required if the provider is 'openai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "palm_config": { "description": "PaLM Config. Only required if the provider is 'palm'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "provider": { "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "task": { "description": "The task type of the external model.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9536,7 +9536,7 @@ "enabled": { "description": "Whether to enable traffic fallback. When a served entity in the serving endpoint returns specific error\ncodes (e.g. 500), the request will automatically be round-robin attempted with other served entities in the same\nendpoint, following the order of served entity list, until a successful response is returned.\nIf all attempts fail, return the last response with the error code.", "$ref": "#/$defs/bool", - "x-since-version": "v0.246.0" + "since_version": "v0.246.0" } }, "additionalProperties": false, @@ -9558,22 +9558,22 @@ "private_key": { "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. 
You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "private_key_plaintext": { "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "project_id": { "description": "This is the Google Cloud project id that the service account is\nassociated with.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "region": { "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9597,57 +9597,57 @@ "microsoft_entra_client_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "microsoft_entra_client_secret": { "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "microsoft_entra_client_secret_plaintext": { "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "microsoft_entra_tenant_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_api_base": { "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. 
For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_api_key": { "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_api_key_plaintext": { "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_api_type": { "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_api_version": { "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_deployment_name": { "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "openai_organization": { "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9666,12 +9666,12 @@ "palm_api_key": { "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "palm_api_key_plaintext": { "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9690,17 +9690,17 @@ "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "key": { "description": "Key field for a serving endpoint rate limit. 
Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "renewal_period": { "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9751,17 +9751,17 @@ "properties": { "served_entity_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "served_model_name": { "description": "The name of the served model this route configures traffic for.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "traffic_percentage": { "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9783,71 +9783,71 @@ "entity_name": { "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "entity_version": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "external_model": { "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. 
Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "provisioned_model_units": { "description": "The number of model units provisioned.", "$ref": "#/$defs/int64", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -9866,65 +9866,65 @@ "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. 
Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "x-since-version": "v0.256.0" + "since_version": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", "$ref": "#/$defs/int", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "model_name": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "model_version": { "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "provisioned_model_units": { "description": "The number of model units provisioned.", "$ref": "#/$defs/int64", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", "$ref": "#/$defs/string", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". 
For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false, @@ -9986,7 +9986,7 @@ "routes": { "description": "The list of routes that define traffic to each served entity.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": false @@ -10059,27 +10059,27 @@ "comparison_operator": { "description": "Operator used for comparison in alert evaluation.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "empty_result_state": { "description": "Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because `UNKNOWN` state is planned to be deprecated.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "notification": { "description": "User or Notification Destination to notify when alert is triggered.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "source": { "description": "Source column from result to use to evaluate alert", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "threshold": { "description": "Threshold to user for alert evaluation, can be a column or a value.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -10102,16 +10102,16 @@ "notify_on_ok": { "description": "Whether to notify alert subscribers when alert returns back to normal.", "$ref": "#/$defs/bool", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "retrigger_seconds": { "description": "Number of seconds an alert waits after being triggered before it is allowed to send another notification.\nIf set to 0 or omitted, the alert will not send any further notifications after the first trigger\nSetting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes.", "$ref": "#/$defs/int", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "subscriptions": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -10129,11 +10129,11 @@ "properties": { "column": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "value": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -10151,15 +10151,15 @@ "properties": { 
"aggregation": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Aggregation", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "display": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "name": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -10180,15 +10180,15 @@ "properties": { "bool_value": { "$ref": "#/$defs/bool", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "double_value": { "$ref": "#/$defs/float64", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "string_value": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -10207,12 +10207,12 @@ "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "user_name": { "description": "The email of an active workspace user. Can only set this field to their own email.", "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -10230,11 +10230,11 @@ "properties": { "destination_id": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "user_email": { "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false @@ -10253,11 +10253,11 @@ "properties": { "dbsql_version": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "name": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ChannelName", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" } }, "additionalProperties": false @@ -10330,17 +10330,17 @@ "pause_status": { "description": "Indicate whether this schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "quartz_cron_schedule": { "description": "A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" }, "timezone_id": { "description": "A Java timezone id. 
The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", "$ref": "#/$defs/string", - "x-since-version": "v0.279.0" + "since_version": "v0.279.0" } }, "additionalProperties": false, @@ -10362,11 +10362,11 @@ "properties": { "key": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" }, "value": { "$ref": "#/$defs/string", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" } }, "additionalProperties": false @@ -10384,7 +10384,7 @@ "properties": { "custom_tags": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair", - "x-since-version": "v0.260.0" + "since_version": "v0.260.0" } }, "additionalProperties": false @@ -10436,12 +10436,12 @@ "dns_name": { "description": "The DNS of the KeyVault", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" }, "resource_id": { "description": "The resource id of the azure KeyVault that user wants to associate the scope with.", "$ref": "#/$defs/string", - "x-since-version": "v0.252.0" + "since_version": "v0.252.0" } }, "additionalProperties": false, @@ -11675,85 +11675,85 @@ "description": "Defines the attributes to build an artifact", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", "markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", "markdownDescription": "The bundle attributes when deploying to this target,", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "environments": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "deprecationMessage": "Deprecated: please use targets instead", - "x-since-version": "v0.243.0", + "since_version": "v0.243.0", "deprecated": true }, "experimental": { "description": "Defines attributes for experimental features.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "include": { "description": "Specifies a list of path globs that contain configuration files to include within the bundle.", "$ref": "#/$defs/slice/string", "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. 
See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", "markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "presets": { "description": "Defines bundle deployment presets.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", "markdownDescription": "Defines bundle deployment presets. See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "python": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", - "x-since-version": "v0.275.0" + "since_version": "v0.275.0" }, "resources": { "description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "run_as": { "description": "The identity to use when running Databricks Asset Bundles workflows.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "scripts": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Script", - "x-since-version": "v0.259.0" + "since_version": "v0.259.0" }, "sync": { "description": "The files and file paths to include or exclude in the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", "markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "targets": { "description": "Defines deployment targets for the bundle.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "markdownDescription": "Defines deployment targets for the bundle. 
See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "variables": { "description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" }, "workspace": { "description": "Defines the Databricks workspace for the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", "markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace).", - "x-since-version": "v0.228.1" + "since_version": "v0.228.1" } }, "additionalProperties": {} diff --git a/libs/jsonschema/extension.go b/libs/jsonschema/extension.go index b5d122b6d8..32f7955a05 100644 --- a/libs/jsonschema/extension.go +++ b/libs/jsonschema/extension.go @@ -67,5 +67,5 @@ type Extension struct { FieldBehaviors []string `json:"x-databricks-field-behaviors,omitempty"` // SinceVersion indicates which CLI version introduced this field. - SinceVersion string `json:"x-since-version,omitempty"` + SinceVersion string `json:"since_version,omitempty"` } From 0f643af518faaa456cb0f87bb8d8ee87b3a0c56c Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 14 Jan 2026 02:51:00 +0100 Subject: [PATCH 5/8] - --- bundle/internal/schema/annotations.yml | 340 ++++++++++++------------- bundle/schema/jsonschema.json | 340 ++++++++++++------------- 2 files changed, 340 insertions(+), 340 deletions(-) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index c033a1c93b..3eded1977a 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -3,7 +3,7 @@ github.com/databricks/cli/bundle/config.Artifact: "description": |- An optional set of build commands to run locally before deployment. "since_version": |- - v0.228.1 + v0.229.0 "dynamic_version": "description": |- Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts). @@ -13,30 +13,30 @@ github.com/databricks/cli/bundle/config.Artifact: "description": |- The executable type. Valid values are `bash`, `sh`, and `cmd`. "since_version": |- - v0.228.1 + v0.229.0 "files": "description": |- The relative or absolute path to the built artifact files. "since_version": |- - v0.228.1 + v0.229.0 "path": "description": |- The local path of the directory for the artifact. "since_version": |- - v0.228.1 + v0.229.0 "type": "description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. "markdown_description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.ArtifactFile: "source": "description": |- Required. The artifact source file. 
"since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Bundle: "cluster_id": "description": |- @@ -49,33 +49,33 @@ github.com/databricks/cli/bundle/config.Bundle: "description": |- Deprecated. The ID of the compute to use to run the bundle. "since_version": |- - v0.228.1 + v0.229.0 "databricks_cli_version": "description": |- The Databricks CLI version to use for the bundle. "markdown_description": |- The Databricks CLI version to use for the bundle. See [\_](/dev-tools/bundles/settings.md#databricks_cli_version). "since_version": |- - v0.228.1 + v0.229.0 "deployment": "description": |- The definition of the bundle deployment "markdown_description": |- The definition of the bundle deployment. For supported attributes see [\_](/dev-tools/bundles/deployment-modes.md). "since_version": |- - v0.228.1 + v0.229.0 "git": "description": |- The Git version control details that are associated with your bundle. "markdown_description": |- The Git version control details that are associated with your bundle. For supported attributes see [\_](/dev-tools/bundles/settings.md#git). "since_version": |- - v0.228.1 + v0.229.0 "name": "description": |- The name of the bundle. "since_version": |- - v0.228.1 + v0.229.0 "uuid": "description": |- Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command). @@ -86,18 +86,18 @@ github.com/databricks/cli/bundle/config.Deployment: "description": |- Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted. "since_version": |- - v0.228.1 + v0.229.0 "lock": "description": |- The deployment lock attributes. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Experimental: "pydabs": "description": |- The PyDABs configuration. "since_version": |- - v0.228.1 + v0.229.0 "deprecation_message": |- Deprecated: please use python instead "python": @@ -109,12 +109,12 @@ github.com/databricks/cli/bundle/config.Experimental: "description": |- Whether to use a Python wheel wrapper. "since_version": |- - v0.228.1 + v0.229.0 "scripts": "description": |- The commands to run. "since_version": |- - v0.228.1 + v0.229.0 "skip_artifact_cleanup": "description": |- Determines whether to skip cleaning up the .internal folder @@ -130,7 +130,7 @@ github.com/databricks/cli/bundle/config.Experimental: "description": |- Whether to use the legacy run_as behavior. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Git: "branch": "description": |- @@ -138,25 +138,25 @@ github.com/databricks/cli/bundle/config.Git: "markdown_description": |- The Git branch name. See [\_](/dev-tools/bundles/settings.md#git). "since_version": |- - v0.228.1 + v0.229.0 "origin_url": "description": |- The origin URL of the repository. "markdown_description": |- The origin URL of the repository. See [\_](/dev-tools/bundles/settings.md#git). "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Lock: "enabled": "description": |- Whether this lock is enabled. "since_version": |- - v0.228.1 + v0.229.0 "force": "description": |- Whether to force this lock if it is enabled. 
"since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Presets: "artifacts_dynamic_version": "description": |- @@ -167,17 +167,17 @@ github.com/databricks/cli/bundle/config.Presets: "description": |- The maximum concurrent runs for a job. "since_version": |- - v0.228.1 + v0.229.0 "name_prefix": "description": |- The prefix for job runs of the bundle. "since_version": |- - v0.228.1 + v0.229.0 "pipelines_development": "description": |- Whether pipeline deployments should be locked in development mode. "since_version": |- - v0.228.1 + v0.229.0 "source_linked_deployment": "description": |- Whether to link the deployment to the bundle source. @@ -187,18 +187,18 @@ github.com/databricks/cli/bundle/config.Presets: "description": |- The tags for the bundle deployment. "since_version": |- - v0.228.1 + v0.229.0 "trigger_pause_status": "description": |- A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.PyDABs: "enabled": "description": |- Whether or not PyDABs (Private Preview) is enabled "since_version": |- - v0.228.1 + v0.229.0 "import": "description": |- The PyDABs project to import to discover resources, resource generator and mutators @@ -272,56 +272,56 @@ github.com/databricks/cli/bundle/config.Resources: "markdown_description": |- The experiment definitions for the bundle, where each key is the name of the experiment. See [\_](/dev-tools/bundles/resources.md#experiments). "since_version": |- - v0.228.1 + v0.229.0 "jobs": "description": |- The job definitions for the bundle, where each key is the name of the job. "markdown_description": |- The job definitions for the bundle, where each key is the name of the job. See [\_](/dev-tools/bundles/resources.md#jobs). "since_version": |- - v0.228.1 + v0.229.0 "model_serving_endpoints": "description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. "markdown_description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [\_](/dev-tools/bundles/resources.md#model_serving_endpoints). "since_version": |- - v0.228.1 + v0.229.0 "models": "description": |- The model definitions for the bundle, where each key is the name of the model. "markdown_description": |- The model definitions for the bundle, where each key is the name of the model. See [\_](/dev-tools/bundles/resources.md#models). "since_version": |- - v0.228.1 + v0.229.0 "pipelines": "description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. "markdown_description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. See [\_](/dev-tools/bundles/resources.md#pipelines). "since_version": |- - v0.228.1 + v0.229.0 "quality_monitors": "description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. "markdown_description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [\_](/dev-tools/bundles/resources.md#quality_monitors). "since_version": |- - v0.228.1 + v0.229.0 "registered_models": "description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. 
"markdown_description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [\_](/dev-tools/bundles/resources.md#registered_models) "since_version": |- - v0.228.1 + v0.229.0 "schemas": "description": |- The schema definitions for the bundle, where each key is the name of the schema. "markdown_description": |- The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). "since_version": |- - v0.228.1 + v0.229.0 "secret_scopes": "description": |- The secret scope definitions for the bundle, where each key is the name of the secret scope. @@ -357,7 +357,7 @@ github.com/databricks/cli/bundle/config.Root: Artifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [\_](/dev-tools/bundles/artifact-overrides.md). "since_version": |- - v0.228.1 + v0.229.0 "markdown_examples": |- ```yaml artifacts: @@ -372,7 +372,7 @@ github.com/databricks/cli/bundle/config.Root: "markdown_description": |- The bundle attributes when deploying to this target, "since_version": |- - v0.228.1 + v0.229.0 "environments": "description": |- PLACEHOLDER @@ -384,14 +384,14 @@ github.com/databricks/cli/bundle/config.Root: "description": |- Defines attributes for experimental features. "since_version": |- - v0.228.1 + v0.229.0 "include": "description": |- Specifies a list of path globs that contain configuration files to include within the bundle. "markdown_description": |- Specifies a list of path globs that contain configuration files to include within the bundle. See [\_](/dev-tools/bundles/settings.md#include). "since_version": |- - v0.228.1 + v0.229.0 "permissions": "description": |- Defines a permission for a specific entity. @@ -400,7 +400,7 @@ github.com/databricks/cli/bundle/config.Root: See [\_](/dev-tools/bundles/settings.md#permissions) and [\_](/dev-tools/bundles/permissions.md). "since_version": |- - v0.228.1 + v0.229.0 "markdown_examples": |- ```yaml permissions: @@ -417,7 +417,7 @@ github.com/databricks/cli/bundle/config.Root: "markdown_description": |- Defines bundle deployment presets. See [\_](/dev-tools/bundles/deployment-modes.md#presets). "since_version": |- - v0.228.1 + v0.229.0 "python": "description": |- PLACEHOLDER @@ -436,14 +436,14 @@ github.com/databricks/cli/bundle/config.Root: : ``` "since_version": |- - v0.228.1 + v0.229.0 "run_as": "description": |- The identity to use when running Databricks Asset Bundles workflows. "markdown_description": |- The identity to use when running Databricks Asset Bundles workflows. See [\_](/dev-tools/bundles/run-as.md). "since_version": |- - v0.228.1 + v0.229.0 "scripts": "description": |- PLACEHOLDER @@ -455,26 +455,26 @@ github.com/databricks/cli/bundle/config.Root: "markdown_description": |- The files and file paths to include or exclude in the bundle. See [\_](/dev-tools/bundles/settings.md#sync). "since_version": |- - v0.228.1 + v0.229.0 "targets": "description": |- Defines deployment targets for the bundle. "markdown_description": |- Defines deployment targets for the bundle. See [\_](/dev-tools/bundles/settings.md#targets) "since_version": |- - v0.228.1 + v0.229.0 "variables": "description": |- A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable. "since_version": |- - v0.228.1 + v0.229.0 "workspace": "description": |- Defines the Databricks workspace for the bundle. 
"markdown_description": |- Defines the Databricks workspace for the bundle. See [\_](/dev-tools/bundles/settings.md#workspace). "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Script: "content": "description": |- @@ -486,28 +486,28 @@ github.com/databricks/cli/bundle/config.Sync: "description": |- A list of files or folders to exclude from the bundle. "since_version": |- - v0.228.1 + v0.229.0 "include": "description": |- A list of files or folders to include in the bundle. "since_version": |- - v0.228.1 + v0.229.0 "paths": "description": |- The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Target: "artifacts": "description": |- The artifacts to include in the target deployment. "since_version": |- - v0.228.1 + v0.229.0 "bundle": "description": |- The bundle attributes when deploying to this target. "since_version": |- - v0.228.1 + v0.229.0 "cluster_id": "description": |- The ID of the cluster to use for this target. @@ -517,129 +517,129 @@ github.com/databricks/cli/bundle/config.Target: "description": |- Deprecated. The ID of the compute to use for this target. "since_version": |- - v0.228.1 + v0.229.0 "deprecation_message": |- Deprecated: please use cluster_id instead "default": "description": |- Whether this target is the default target. "since_version": |- - v0.228.1 + v0.229.0 "git": "description": |- The Git version control settings for the target. "since_version": |- - v0.228.1 + v0.229.0 "mode": "description": |- The deployment mode for the target. "markdown_description": |- The deployment mode for the target. Valid values are `development` or `production`. See [\_](/dev-tools/bundles/deployment-modes.md). "since_version": |- - v0.228.1 + v0.229.0 "permissions": "description": |- The permissions for deploying and running the bundle in the target. "since_version": |- - v0.228.1 + v0.229.0 "presets": "description": |- The deployment presets for the target. "since_version": |- - v0.228.1 + v0.229.0 "resources": "description": |- The resource definitions for the target. "since_version": |- - v0.228.1 + v0.229.0 "run_as": "description": |- The identity to use to run the bundle. "markdown_description": |- The identity to use to run the bundle, see [\_](/dev-tools/bundles/run-as.md). "since_version": |- - v0.228.1 + v0.229.0 "sync": "description": |- The local paths to sync to the target workspace when a bundle is run or deployed. "since_version": |- - v0.228.1 + v0.229.0 "variables": "description": |- The custom variable definitions for the target. "since_version": |- - v0.228.1 + v0.229.0 "workspace": "description": |- The Databricks workspace for the target. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config.Workspace: "artifact_path": "description": |- The artifact path to use within the workspace for both deployments and workflow runs "since_version": |- - v0.228.1 + v0.229.0 "auth_type": "description": |- The authentication type. 
"since_version": |- - v0.228.1 + v0.229.0 "azure_client_id": "description": |- The Azure client ID "since_version": |- - v0.228.1 + v0.229.0 "azure_environment": "description": |- The Azure environment "since_version": |- - v0.228.1 + v0.229.0 "azure_login_app_id": "description": |- The Azure login app ID "since_version": |- - v0.228.1 + v0.229.0 "azure_tenant_id": "description": |- The Azure tenant ID "since_version": |- - v0.228.1 + v0.229.0 "azure_use_msi": "description": |- Whether to use MSI for Azure "since_version": |- - v0.228.1 + v0.229.0 "azure_workspace_resource_id": "description": |- The Azure workspace resource ID "since_version": |- - v0.228.1 + v0.229.0 "client_id": "description": |- The client ID for the workspace "since_version": |- - v0.228.1 + v0.229.0 "file_path": "description": |- The file path to use within the workspace for both deployments and workflow runs "since_version": |- - v0.228.1 + v0.229.0 "google_service_account": "description": |- The Google service account name "since_version": |- - v0.228.1 + v0.229.0 "host": "description": |- The Databricks workspace host URL "since_version": |- - v0.228.1 + v0.229.0 "profile": "description": |- The Databricks workspace profile name "since_version": |- - v0.228.1 + v0.229.0 "resource_path": "description": |- The workspace resource path @@ -649,12 +649,12 @@ github.com/databricks/cli/bundle/config.Workspace: "description": |- The Databricks workspace root path "since_version": |- - v0.228.1 + v0.229.0 "state_path": "description": |- The workspace state path "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.Alert: "create_time": "description": |- @@ -971,73 +971,73 @@ github.com/databricks/cli/bundle/config/resources.Grant: "description": |- The name of the principal that will be granted privileges "since_version": |- - v0.228.1 + v0.229.0 "privileges": "description": |- The privileges to grant to the specified entity "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.Job: "budget_policy_id": "since_version": |- v0.231.0 "continuous": "since_version": |- - v0.228.1 + v0.229.0 "description": "since_version": |- - v0.228.1 + v0.229.0 "email_notifications": "since_version": |- - v0.228.1 + v0.229.0 "environments": "since_version": |- - v0.228.1 + v0.229.0 "git_source": "since_version": |- - v0.228.1 + v0.229.0 "job_clusters": "since_version": |- - v0.228.1 + v0.229.0 "max_concurrent_runs": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "notification_settings": "since_version": |- - v0.228.1 + v0.229.0 "parameters": "since_version": |- - v0.228.1 + v0.229.0 "performance_target": "since_version": |- v0.241.0 "queue": "since_version": |- - v0.228.1 + v0.229.0 "schedule": "since_version": |- - v0.228.1 + v0.229.0 "tags": "since_version": |- - v0.228.1 + v0.229.0 "tasks": "since_version": |- - v0.228.1 + v0.229.0 "timeout_seconds": "since_version": |- - v0.228.1 + v0.229.0 "trigger": "since_version": |- - v0.228.1 + v0.229.0 "usage_policy_id": "since_version": |- v0.265.0 "webhook_notifications": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.JobPermission: "group_name": "description": |- @@ -1068,13 +1068,13 @@ github.com/databricks/cli/bundle/config/resources.Lifecycle: github.com/databricks/cli/bundle/config/resources.MlflowExperiment: "artifact_location": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "tags": 
"since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- @@ -1099,13 +1099,13 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: github.com/databricks/cli/bundle/config/resources.MlflowModel: "description": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "tags": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "group_name": "description": |- @@ -1136,22 +1136,22 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: v0.244.0 "config": "since_version": |- - v0.228.1 + v0.229.0 "email_notifications": "since_version": |- v0.264.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "rate_limits": "since_version": |- - v0.228.1 + v0.229.0 "route_optimized": "since_version": |- - v0.228.1 + v0.229.0 "tags": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission: "group_name": "description": |- @@ -1183,22 +1183,22 @@ github.com/databricks/cli/bundle/config/resources.Permission: "description": |- The name of the group that has the permission set in level. "since_version": |- - v0.228.1 + v0.229.0 "level": "description": |- The allowed permission for user, group, service principal defined for this permission. "since_version": |- - v0.228.1 + v0.229.0 "service_principal_name": "description": |- The name of the service principal that has the permission set in level. "since_version": |- - v0.228.1 + v0.229.0 "user_name": "description": |- The name of the user that has the permission set in level. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.Pipeline: "allow_duplicate_names": "since_version": |- @@ -1208,25 +1208,25 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: v0.230.0 "catalog": "since_version": |- - v0.228.1 + v0.229.0 "channel": "since_version": |- - v0.228.1 + v0.229.0 "clusters": "since_version": |- - v0.228.1 + v0.229.0 "configuration": "since_version": |- - v0.228.1 + v0.229.0 "continuous": "since_version": |- - v0.228.1 + v0.229.0 "development": "since_version": |- - v0.228.1 + v0.229.0 "edition": "since_version": |- - v0.228.1 + v0.229.0 "environment": "since_version": |- v0.257.0 @@ -1235,28 +1235,28 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: v0.246.0 "filters": "since_version": |- - v0.228.1 + v0.229.0 "gateway_definition": "since_version": |- - v0.228.1 + v0.229.0 "id": "since_version": |- - v0.228.1 + v0.229.0 "ingestion_definition": "since_version": |- - v0.228.1 + v0.229.0 "libraries": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "notifications": "since_version": |- - v0.228.1 + v0.229.0 "photon": "since_version": |- - v0.228.1 + v0.229.0 "restart_window": "since_version": |- v0.234.0 @@ -1268,16 +1268,16 @@ github.com/databricks/cli/bundle/config/resources.Pipeline: v0.230.0 "serverless": "since_version": |- - v0.228.1 + v0.229.0 "storage": "since_version": |- - v0.228.1 + v0.229.0 "tags": "since_version": |- v0.256.0 "target": "since_version": |- - v0.228.1 + v0.229.0 "usage_policy_id": "since_version": |- v0.276.0 @@ -1305,72 +1305,72 @@ github.com/databricks/cli/bundle/config/resources.PipelinePermission: github.com/databricks/cli/bundle/config/resources.QualityMonitor: "assets_dir": "since_version": |- - v0.228.1 + v0.229.0 
"baseline_table_name": "since_version": |- - v0.228.1 + v0.229.0 "custom_metrics": "since_version": |- - v0.228.1 + v0.229.0 "data_classification_config": "since_version": |- - v0.228.1 + v0.229.0 "latest_monitor_failure_msg": "since_version": |- v0.264.0 "notifications": "since_version": |- - v0.228.1 + v0.229.0 "output_schema_name": "since_version": |- - v0.228.1 + v0.229.0 "schedule": "since_version": |- - v0.228.1 + v0.229.0 "skip_builtin_dashboard": "since_version": |- - v0.228.1 + v0.229.0 "slicing_exprs": "since_version": |- - v0.228.1 + v0.229.0 "snapshot": "since_version": |- - v0.228.1 + v0.229.0 "time_series": "since_version": |- - v0.228.1 + v0.229.0 "warehouse_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.RegisteredModel: "catalog_name": "since_version": |- - v0.228.1 + v0.229.0 "comment": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "schema_name": "since_version": |- - v0.228.1 + v0.229.0 "storage_location": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.Schema: "catalog_name": "since_version": |- - v0.228.1 + v0.229.0 "comment": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "storage_root": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/resources.SchemaGrant: "principal": "description": |- @@ -1551,37 +1551,37 @@ github.com/databricks/cli/bundle/config/variable.Lookup: "description": |- The name of the alert for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "cluster": "description": |- The name of the cluster for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "cluster_policy": "description": |- The name of the cluster_policy for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "dashboard": "description": |- The name of the dashboard for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "instance_pool": "description": |- The name of the instance_pool for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "job": "description": |- The name of the job for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "metastore": "description": |- The name of the metastore for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "notification_destination": "description": |- The name of the notification_destination for which to retrieve an ID. @@ -1591,38 +1591,38 @@ github.com/databricks/cli/bundle/config/variable.Lookup: "description": |- The name of the pipeline for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "query": "description": |- The name of the query for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "service_principal": "description": |- The name of the service_principal for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "warehouse": "description": |- The name of the warehouse for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/variable.TargetVariable: "default": "description": |- The default value for the variable. "since_version": |- - v0.228.1 + v0.229.0 "description": "description": |- The description of the variable. 
"since_version": |- - v0.228.1 + v0.229.0 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "markdown_description": "description": |- The type of the variable. @@ -1630,7 +1630,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable: "description": |- The type of the variable. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/cli/bundle/config/variable.Variable: "_": "description": |- @@ -1641,24 +1641,24 @@ github.com/databricks/cli/bundle/config/variable.Variable: "description": |- The default value for the variable. "since_version": |- - v0.228.1 + v0.229.0 "description": "description": |- The description of the variable "since_version": |- - v0.228.1 + v0.229.0 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. "markdown_description": |- The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID. "since_version": |- - v0.228.1 + v0.229.0 "type": "description": |- The type of the variable. "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs: "service_principal_name": "description": |- diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 842ffb03db..dc1a939e02 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -799,12 +799,12 @@ "principal": { "description": "The name of the principal that will be granted privileges", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "privileges": { "description": "The privileges to grant to the specified entity", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -832,27 +832,27 @@ "continuous": { "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "description": { "description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "environments": { "description": "A list of task execution environment specifications that can be referenced by serverless tasks of this job.\nFor serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. 
If a jobs environment is specified, it will override the notebook environment.\nFor other serverless tasks, the task environment is required to be specified using environment_key in the task settings.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git_source": { "description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules" @@ -860,7 +860,7 @@ "job_clusters": { "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -869,22 +869,22 @@ "max_concurrent_runs": { "description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name": { "description": "An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "parameters": { "description": "Job-level parameter definitions", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "performance_target": { "description": "The performance mode on a serverless job. 
This field determines the level of compute performance or cost-efficiency for the run.\nThe performance target does not apply to tasks that run on Serverless GPU compute.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.", @@ -897,7 +897,7 @@ "queue": { "description": "The queue settings of the job.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_as": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs" @@ -905,27 +905,27 @@ "schedule": { "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "tags": { "description": "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "tasks": { "description": "A list of task specifications to be executed by this job.\nIt supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit).\nRead endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job. A value of `0` means no timeout.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "trigger": { "description": "A configuration to trigger a run when certain conditions are met. 
The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "usage_policy_id": { "description": "The id of the user specified usage policy to use for this job.\nIf not specified, a default usage policy may be applied when creating or modifying the job.\nSee `effective_usage_policy_id` for the usage policy used by this workload.", @@ -937,7 +937,7 @@ "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this job begin or complete.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1026,7 +1026,7 @@ "artifact_location": { "description": "Location where all artifacts for the experiment are stored.\nIf not provided, the remote server will select an appropriate default.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1035,7 +1035,7 @@ "name": { "description": "Experiment name.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission" @@ -1043,7 +1043,7 @@ "tags": { "description": "A collection of tags to set on the experiment. Maximum tag size and number of tags per request\ndepends on the storage backend. All storage backends are guaranteed to support tag keys up\nto 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also\nguaranteed to support up to 20 tags per request.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1115,7 +1115,7 @@ "description": { "description": "Optional description for registered model.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1124,7 +1124,7 @@ "name": { "description": "Register models under this name", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowModelPermission" @@ -1132,7 +1132,7 @@ "tags": { "description": "Additional metadata for registered model.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1216,7 +1216,7 @@ "config": { "description": "The core config of the serving endpoint.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "description": { "$ref": "#/$defs/string" @@ -1233,7 +1233,7 @@ "name": { "description": "The name of the serving endpoint. 
This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission" @@ -1242,18 +1242,18 @@ "description": "Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.RateLimit", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "route_optimized": { "description": "Enable route optimization for the serving endpoint.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "tags": { "description": "Tags to be attached to the serving endpoint and automatically propagated to billing logs.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1325,22 +1325,22 @@ "group_name": { "description": "The name of the group that has the permission set in level.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "level": { "description": "The allowed permission for user, group, service principal defined for this permission.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "service_principal_name": { "description": "The name of the service principal that has the permission set in level.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "user_name": { "description": "The name of the user that has the permission set in level.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1372,37 +1372,37 @@ "catalog": { "description": "A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "channel": { "description": "DLT Release Channel that specifies which version to use.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "clusters": { "description": "Cluster settings for this pipeline deployment.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "configuration": { "description": "String-String configuration for this pipeline execution.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "continuous": { "description": "Whether the pipeline is continuous or triggered. This replaces `trigger`.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "development": { "description": "Whether the pipeline is in Development mode. 
Defaults to false.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "edition": { "description": "Pipeline product edition.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "environment": { "description": "Environment specification for this pipeline used to install dependencies.", @@ -1417,29 +1417,29 @@ "filters": { "description": "Filters on which Pipeline packages to include in the deployed graph.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gateway_definition": { "description": "The definition of a gateway pipeline to support change data capture.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "id": { "description": "Unique identifier for this pipeline.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ingestion_definition": { "description": "The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "libraries": { "description": "Libraries or code needed by this deployment.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.", @@ -1448,12 +1448,12 @@ "name": { "description": "Friendly identifier for this pipeline.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notifications": { "description": "List of notification settings for this pipeline.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.PipelinePermission" @@ -1461,7 +1461,7 @@ "photon": { "description": "Whether Photon is enabled for this pipeline.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "restart_window": { "description": "Restart window of this pipeline.", @@ -1486,12 +1486,12 @@ "serverless": { "description": "Whether serverless compute is enabled for this pipeline.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "storage": { "description": "DBFS root directory for storing checkpoints and tables.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "tags": { "description": "A map of tags associated with the pipeline.\nThese are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations.\nA maximum of 25 tags can be added to the pipeline.", @@ -1502,7 +1502,7 @@ "description": "Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. 
This legacy field is deprecated for pipeline creation in favor of the `schema` field.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "trigger": { @@ -1586,24 +1586,24 @@ "assets_dir": { "description": "[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "baseline_table_name": { "description": "[Create:OPT Update:OPT] Baseline table name.\nBaseline data is used to compute drift from the data in the monitored `table_name`.\nThe baseline table and the monitored table shall have the same schema.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "custom_metrics": { "description": "[Create:OPT Update:OPT] Custom metrics.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "data_classification_config": { "description": "[Create:OPT Update:OPT] Data classification related config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "inference_log": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog" @@ -1620,32 +1620,32 @@ "notifications": { "description": "[Create:OPT Update:OPT] Field for specifying notification settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "output_schema_name": { "description": "[Create:REQ Update:REQ] Schema where output tables are created. Needs to be in 2-level format {catalog}.{schema}", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "schedule": { "description": "[Create:OPT Update:OPT] The monitor schedule.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "skip_builtin_dashboard": { "description": "Whether to skip creating a default dashboard summarizing data quality metrics.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "slicing_exprs": { "description": "[Create:OPT Update:OPT] List of column expressions to slice data with for targeted analysis. The data is grouped by\neach expression independently, resulting in a separate slice for each predicate and its\ncomplements. For example `slicing_exprs=[“col_1”, “col_2 \u003e 10”]` will generate the following\nslices: two slices for `col_2 \u003e 10` (True and False), and one slice per unique value in\n`col1`. 
For high-cardinality columns, only the top 100 unique values by frequency will\ngenerate slices.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "snapshot": { "description": "Configuration for monitoring snapshot tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_name": { "$ref": "#/$defs/string" @@ -1653,12 +1653,12 @@ "time_series": { "description": "Configuration for monitoring time series tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "warehouse_id": { "description": "Optional argument to specify the warehouse for dashboard creation. If not specified, the first running\nwarehouse will be used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -1689,12 +1689,12 @@ "catalog_name": { "description": "The name of the catalog where the schema and the registered model reside", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "comment": { "description": "The comment attached to the registered model", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "created_at": { "$ref": "#/$defs/int64" @@ -1718,7 +1718,7 @@ "name": { "description": "The name of the registered model", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "owner": { "$ref": "#/$defs/string" @@ -1726,12 +1726,12 @@ "schema_name": { "description": "The name of the schema where the registered model resides", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "storage_location": { "description": "The storage location on the cloud under which model version data files are stored", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "updated_at": { "$ref": "#/$defs/int64" @@ -1757,12 +1757,12 @@ "catalog_name": { "description": "Name of parent catalog.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "comment": { "description": "User-provided free-form text description.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant" @@ -1774,7 +1774,7 @@ "name": { "description": "Name of schema, relative to parent catalog.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "properties": { "$ref": "#/$defs/map/string" @@ -1782,7 +1782,7 @@ "storage_root": { "description": "Storage root URL for managed tables within schema.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -2225,37 +2225,37 @@ "alert": { "description": "The name of the alert for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster": { "description": "The name of the cluster for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster_policy": { "description": "The name of the cluster_policy for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, 
"dashboard": { "description": "The name of the dashboard for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "instance_pool": { "description": "The name of the instance_pool for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "job": { "description": "The name of the job for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "metastore": { "description": "The name of the metastore for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notification_destination": { "description": "The name of the notification_destination for which to retrieve an ID.", @@ -2265,22 +2265,22 @@ "pipeline": { "description": "The name of the pipeline for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "query": { "description": "The name of the query for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "service_principal": { "description": "The name of the service_principal for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "warehouse": { "description": "The name of the warehouse for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2299,22 +2299,22 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "description": { "description": "The description of the variable.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2329,23 +2329,23 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "description": { "description": "The description of the variable", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - 
"since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -2363,7 +2363,7 @@ "build": { "description": "An optional set of build commands to run locally before deployment.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "dynamic_version": { "description": "Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts).", @@ -2373,23 +2373,23 @@ "executable": { "description": "The executable type. Valid values are `bash`, `sh`, and `cmd`.", "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "files": { "description": "The relative or absolute path to the built artifact files.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "path": { "description": "The local path of the directory for the artifact.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "type": { "description": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType", "markdownDescription": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2408,7 +2408,7 @@ "source": { "description": "Required. The artifact source file.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -2439,30 +2439,30 @@ "compute_id": { "description": "Deprecated. The ID of the compute to use to run the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "databricks_cli_version": { "description": "The Databricks CLI version to use for the bundle.", "$ref": "#/$defs/string", "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "deployment": { "description": "The definition of the bundle deployment", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment", "markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git": { "description": "The Git version control details that are associated with your bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name": { "description": "The name of the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "uuid": { "description": "Reserved. 
A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command).", @@ -2492,12 +2492,12 @@ "fail_on_active_runs": { "description": "Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "lock": { "description": "The deployment lock attributes.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2517,7 +2517,7 @@ "description": "The PyDABs configuration.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs", "deprecationMessage": "Deprecated: please use python instead", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "python": { @@ -2528,12 +2528,12 @@ "python_wheel_wrapper": { "description": "Whether to use a Python wheel wrapper.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "scripts": { "description": "The commands to run.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "skip_artifact_cleanup": { "description": "Determines whether to skip cleaning up the .internal folder", @@ -2548,7 +2548,7 @@ "use_legacy_run_as": { "description": "Whether to use the legacy run_as behavior.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2568,13 +2568,13 @@ "description": "The Git branch name.", "$ref": "#/$defs/string", "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "origin_url": { "description": "The origin URL of the repository.", "$ref": "#/$defs/string", "markdownDescription": "The origin URL of the repository. 
See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2593,12 +2593,12 @@ "enabled": { "description": "Whether this lock is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "force": { "description": "Whether to force this lock if it is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2625,17 +2625,17 @@ "jobs_max_concurrent_runs": { "description": "The maximum concurrent runs for a job.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name_prefix": { "description": "The prefix for job runs of the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pipelines_development": { "description": "Whether pipeline deployments should be locked in development mode.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_linked_deployment": { "description": "Whether to link the deployment to the bundle source.", @@ -2645,12 +2645,12 @@ "tags": { "description": "The tags for the bundle deployment.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "trigger_pause_status": { "description": "A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2669,7 +2669,7 @@ "enabled": { "description": "Whether or not PyDABs (Private Preview) is enabled", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2748,49 +2748,49 @@ "description": "The experiment definitions for the bundle, where each key is the name of the experiment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment", "markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "jobs": { "description": "The job definitions for the bundle, where each key is the name of the job.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job", "markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "model_serving_endpoints": { "description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint", "markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. 
See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "models": { "description": "The model definitions for the bundle, where each key is the name of the model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel", "markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pipelines": { "description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline", "markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "quality_monitors": { "description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor", "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "registered_models": { "description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel", "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "schemas": { "description": "The schema definitions for the bundle, where each key is the name of the schema.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema", "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. 
See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "secret_scopes": { "description": "The secret scope definitions for the bundle, where each key is the name of the secret scope.", @@ -2852,17 +2852,17 @@ "exclude": { "description": "A list of files or folders to exclude from the bundle.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "include": { "description": "A list of files or folders to include in the bundle.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "paths": { "description": "The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2881,12 +2881,12 @@ "artifacts": { "description": "The artifacts to include in the target deployment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster_id": { "description": "The ID of the cluster to use for this target.", @@ -2897,60 +2897,60 @@ "description": "Deprecated. The ID of the compute to use for this target.", "$ref": "#/$defs/string", "deprecationMessage": "Deprecated: please use cluster_id instead", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "default": { "description": "Whether this target is the default target.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git": { "description": "The Git version control settings for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "mode": { "description": "The deployment mode for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. 
See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "description": "The permissions for deploying and running the bundle in the target.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "presets": { "description": "The deployment presets for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "resources": { "description": "The resource definitions for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_as": { "description": "The identity to use to run the bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "sync": { "description": "The local paths to sync to the target workspace when a bundle is run or deployed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "variables": { "description": "The custom variable definitions for the target.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "workspace": { "description": "The Databricks workspace for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -2969,67 +2969,67 @@ "artifact_path": { "description": "The artifact path to use within the workspace for both deployments and workflow runs", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "auth_type": { "description": "The authentication type.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_client_id": { "description": "The Azure client ID", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_environment": { "description": "The Azure environment", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_login_app_id": { "description": "The Azure login app ID", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_tenant_id": { "description": "The Azure tenant ID", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_use_msi": { "description": "Whether to use MSI for Azure", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_workspace_resource_id": { "description": "The Azure workspace resource ID", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "client_id": { "description": "The client ID for the workspace", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "file_path": { "description": "The file path to use within the workspace for both deployments and workflow runs", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "google_service_account": { 
"description": "The Google service account name", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "host": { "description": "The Databricks workspace host URL", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "profile": { "description": "The Databricks workspace profile name", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "resource_path": { "description": "The workspace resource path", @@ -3039,12 +3039,12 @@ "root_path": { "description": "The Databricks workspace root path", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "state_path": { "description": "The workspace state path", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -11675,13 +11675,13 @@ "description": "Defines the attributes to build an artifact", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", "markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", "markdownDescription": "The bundle attributes when deploying to this target,", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "environments": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", @@ -11692,25 +11692,25 @@ "experimental": { "description": "Defines attributes for experimental features.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "include": { "description": "Specifies a list of path globs that contain configuration files to include within the bundle.", "$ref": "#/$defs/slice/string", "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", "markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "presets": { "description": "Defines bundle deployment presets.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", "markdownDescription": "Defines bundle deployment presets. 
See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "python": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", @@ -11720,13 +11720,13 @@ "description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_as": { "description": "The identity to use when running Databricks Asset Bundles workflows.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "scripts": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Script", @@ -11736,24 +11736,24 @@ "description": "The files and file paths to include or exclude in the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", "markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "targets": { "description": "Defines deployment targets for the bundle.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "markdownDescription": "Defines deployment targets for the bundle. See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "variables": { "description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "workspace": { "description": "Defines the Databricks workspace for the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", "markdownDescription": "Defines the Databricks workspace for the bundle. 
See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace).", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": {}

From f35d4b5ef94878b90ece0c1d8c02d1500c1f8363 Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 14 Jan 2026 02:53:51 +0100
Subject: [PATCH 6/8] Update since_version from v0.228.1 to v0.229.0

---
 .../schema/annotations_openapi_overrides.yml | 836 +++++++++---------
 bundle/schema/jsonschema.json | 836 +++++++++---------
 2 files changed, 836 insertions(+), 836 deletions(-)

diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index c452ba610a..7e3d51f59a 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -886,77 +886,77 @@ github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule: "pause_status": "since_version": |- - v0.228.1 + v0.229.0 "quartz_cron_expression": "since_version": |- - v0.228.1 + v0.229.0 "timezone_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig: "enabled": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination: "email_addresses": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. "since_version": |- - v0.228.1 + v0.229.0 "label_col": "since_version": |- - v0.228.1 + v0.229.0 "model_id_col": "since_version": |- - v0.228.1 + v0.229.0 "prediction_col": "since_version": |- - v0.228.1 + v0.229.0 "prediction_proba_col": "since_version": |- - v0.228.1 + v0.229.0 "problem_type": "since_version": |- - v0.228.1 + v0.229.0 "timestamp_col": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric: "definition": "since_version": |- - v0.228.1 + v0.229.0 "input_columns": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "output_data_type": "since_version": |- - v0.228.1 + v0.229.0 "type": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications: "on_failure": "since_version": |- - v0.228.1 + v0.229.0 "on_new_classification_tag_detected": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
"since_version": |- - v0.228.1 + v0.229.0 "timestamp_col": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias: "alias_name": "since_version": |- @@ -987,137 +987,137 @@ github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias: github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info: "destination": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AutoScale: "max_workers": "since_version": |- - v0.228.1 + v0.229.0 "min_workers": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes: "availability": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "ebs_volume_count": "since_version": |- - v0.228.1 + v0.229.0 "ebs_volume_iops": "since_version": |- - v0.228.1 + v0.229.0 "ebs_volume_size": "since_version": |- - v0.228.1 + v0.229.0 "ebs_volume_throughput": "since_version": |- - v0.228.1 + v0.229.0 "ebs_volume_type": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "first_on_demand": "since_version": |- - v0.228.1 + v0.229.0 "instance_profile_arn": "since_version": |- - v0.228.1 + v0.229.0 "spot_bid_price_percent": "since_version": |- - v0.228.1 + v0.229.0 "zone_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes: "availability": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "first_on_demand": "since_version": |- - v0.228.1 + v0.229.0 "log_analytics_info": "since_version": |- - v0.228.1 + v0.229.0 "spot_bid_max_price": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes: "jobs": "since_version": |- - v0.228.1 + v0.229.0 "notebooks": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf: "dbfs": "since_version": |- - v0.228.1 + v0.229.0 "s3": "since_version": |- - v0.228.1 + v0.229.0 "volumes": "since_version": |- v0.242.0 github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: "apply_policy_default_values": "since_version": |- - v0.228.1 + v0.229.0 "autoscale": "since_version": |- - v0.228.1 + v0.229.0 "autotermination_minutes": "since_version": |- - v0.228.1 + v0.229.0 "aws_attributes": "since_version": |- - v0.228.1 + v0.229.0 "azure_attributes": "since_version": |- - v0.228.1 + v0.229.0 "cluster_log_conf": "since_version": |- - v0.228.1 + v0.229.0 "cluster_name": "since_version": |- - v0.228.1 + v0.229.0 "custom_tags": "since_version": |- - v0.228.1 + v0.229.0 "data_security_mode": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "docker_image": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "driver_instance_pool_id": "since_version": |- - v0.228.1 + v0.229.0 "driver_node_type_id": "since_version": |- - v0.228.1 + v0.229.0 "enable_elastic_disk": "since_version": |- - v0.228.1 + v0.229.0 "enable_local_disk_encryption": "since_version": |- - v0.228.1 + v0.229.0 "gcp_attributes": "since_version": |- - v0.228.1 + v0.229.0 "init_scripts": "since_version": |- - v0.228.1 + v0.229.0 "instance_pool_id": "since_version": |- - v0.228.1 + v0.229.0 "is_single_node": "since_version": |- v0.237.0 @@ -1128,13 +1128,13 @@ github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: v0.237.0 "node_type_id": "since_version": |- - v0.228.1 + v0.229.0 "num_workers": "since_version": |- - 
v0.228.1 + v0.229.0 "policy_id": "since_version": |- - v0.228.1 + v0.229.0 "remote_disk_throughput": "since_version": |- v0.257.0 @@ -1142,22 +1142,22 @@ github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "single_user_name": "since_version": |- - v0.228.1 + v0.229.0 "spark_conf": "since_version": |- - v0.228.1 + v0.229.0 "spark_env_vars": "since_version": |- - v0.228.1 + v0.229.0 "spark_version": "since_version": |- - v0.228.1 + v0.229.0 "ssh_public_keys": "since_version": |- - v0.228.1 + v0.229.0 "total_initial_remote_disk_size": "since_version": |- v0.257.0 @@ -1168,36 +1168,36 @@ github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo: "destination": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth: "password": "since_version": |- - v0.228.1 + v0.229.0 "username": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DockerImage: "basic_auth": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "url": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.Environment: "client": "since_version": |- - v0.228.1 + v0.229.0 "dependencies": "description": |- List of pip dependencies, as supported by the version of pip in this environment. "since_version": |- - v0.228.1 + v0.229.0 "environment_version": "since_version": |- v0.252.0 @@ -1211,53 +1211,53 @@ github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "boot_disk_size": "since_version": |- - v0.228.1 + v0.229.0 "first_on_demand": "since_version": |- v0.265.0 "google_service_account": "since_version": |- - v0.228.1 + v0.229.0 "local_ssd_count": "since_version": |- - v0.228.1 + v0.229.0 "use_preemptible_executors": "since_version": |- - v0.228.1 + v0.229.0 "zone_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo: "destination": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo: "abfss": "description": |- Contains the Azure Data Lake Storage destination path "since_version": |- - v0.228.1 + v0.229.0 "dbfs": "since_version": |- - v0.228.1 + v0.229.0 "file": "since_version": |- - v0.228.1 + v0.229.0 "gcs": "since_version": |- - v0.228.1 + v0.229.0 "s3": "since_version": |- - v0.228.1 + v0.229.0 "volumes": "since_version": |- - v0.228.1 + v0.229.0 "workspace": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.Kind: "_": "enum": @@ -1266,98 +1266,98 @@ github.com/databricks/databricks-sdk-go/service/compute.Kind: github.com/databricks/databricks-sdk-go/service/compute.Library: "cran": "since_version": |- - v0.228.1 + v0.229.0 "egg": "since_version": |- - v0.228.1 + v0.229.0 "jar": "since_version": |- - v0.228.1 + v0.229.0 "maven": "since_version": |- - v0.228.1 + v0.229.0 "pypi": "since_version": |- - v0.228.1 + v0.229.0 "requirements": "since_version": |- - v0.228.1 + v0.229.0 "whl": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo: "destination": "since_version": |- - v0.228.1 + v0.229.0 
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo: "log_analytics_primary_key": "description": |- The primary key for the Azure Log Analytics agent configuration "since_version": |- - v0.228.1 + v0.229.0 "log_analytics_workspace_id": "description": |- The workspace ID for the Azure Log Analytics agent configuration "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary: "coordinates": "since_version": |- - v0.228.1 + v0.229.0 "exclusions": "since_version": |- - v0.228.1 + v0.229.0 "repo": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary: "package": "since_version": |- - v0.228.1 + v0.229.0 "repo": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary: "package": "since_version": |- - v0.228.1 + v0.229.0 "repo": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo: "canned_acl": "since_version": |- - v0.228.1 + v0.229.0 "destination": "since_version": |- - v0.228.1 + v0.229.0 "enable_encryption": "since_version": |- - v0.228.1 + v0.229.0 "encryption_type": "since_version": |- - v0.228.1 + v0.229.0 "endpoint": "since_version": |- - v0.228.1 + v0.229.0 "kms_key": "since_version": |- - v0.228.1 + v0.229.0 "region": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo: "destination": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.WorkloadType: "clients": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo: "destination": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/database.CustomTag: "key": "since_version": |- @@ -1450,30 +1450,30 @@ github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig: github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask: "left": "since_version": |- - v0.228.1 + v0.229.0 "op": "since_version": |- - v0.228.1 + v0.229.0 "right": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Continuous: "pause_status": "since_version": |- - v0.228.1 + v0.229.0 "task_retry_mode": "since_version": |- v0.267.0 github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule: "pause_status": "since_version": |- - v0.228.1 + v0.229.0 "quartz_cron_expression": "since_version": |- - v0.228.1 + v0.229.0 "timezone_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask: "dashboard_id": "description": |- @@ -1505,45 +1505,45 @@ github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask: github.com/databricks/databricks-sdk-go/service/jobs.DbtTask: "catalog": "since_version": |- - v0.228.1 + v0.229.0 "commands": "since_version": |- - v0.228.1 + v0.229.0 "profiles_directory": "since_version": |- - v0.228.1 + v0.229.0 "project_directory": "since_version": |- - v0.228.1 + v0.229.0 "schema": "since_version": |- - v0.228.1 + v0.229.0 "source": "since_version": |- - v0.228.1 + v0.229.0 "warehouse_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration: "min_time_between_triggers_seconds": "since_version": |- - v0.228.1 + v0.229.0 "url": "since_version": |- - v0.228.1 + v0.229.0 "wait_after_last_change_seconds": 
"since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask: "concurrency": "since_version": |- - v0.228.1 + v0.229.0 "inputs": "since_version": |- - v0.228.1 + v0.229.0 "task": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask: "command": "since_version": |- @@ -1574,112 +1574,112 @@ github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask: github.com/databricks/databricks-sdk-go/service/jobs.GitSnapshot: "used_commit": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.GitSource: "git_branch": "since_version": |- - v0.228.1 + v0.229.0 "git_commit": "since_version": |- - v0.228.1 + v0.229.0 "git_provider": "since_version": |- - v0.228.1 + v0.229.0 "git_snapshot": "description": |- PLACEHOLDER "git_tag": "since_version": |- - v0.228.1 + v0.229.0 "git_url": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobCluster: "job_cluster_key": "since_version": |- - v0.228.1 + v0.229.0 "new_cluster": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobDeployment: "kind": "since_version": |- - v0.228.1 + v0.229.0 "metadata_file_path": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications: "no_alert_for_skipped_runs": "since_version": |- - v0.228.1 + v0.229.0 "on_duration_warning_threshold_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_failure": "since_version": |- - v0.228.1 + v0.229.0 "on_start": "since_version": |- - v0.228.1 + v0.229.0 "on_streaming_backlog_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_success": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment: "environment_key": "since_version": |- - v0.228.1 + v0.229.0 "spec": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings: "no_alert_for_canceled_runs": "since_version": |- - v0.228.1 + v0.229.0 "no_alert_for_skipped_runs": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition: "default": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobSource: "dirty_state": "since_version": |- - v0.228.1 + v0.229.0 "import_from_git_branch": "since_version": |- - v0.228.1 + v0.229.0 "job_config_path": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule: "metric": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "op": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "value": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules: "rules": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration: "aliases": "since_version": |- @@ -1699,34 +1699,34 @@ github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration: github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask: "base_parameters": "since_version": |- - v0.228.1 + v0.229.0 "notebook_path": "since_version": |- - v0.228.1 + v0.229.0 "source": "since_version": |- - v0.228.1 + 
v0.229.0 "warehouse_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration: "interval": "since_version": |- - v0.228.1 + v0.229.0 "unit": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams: "full_refresh": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask: "full_refresh": "since_version": |- - v0.228.1 + v0.229.0 "pipeline_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel: "authentication_method": "since_version": |- @@ -1775,140 +1775,140 @@ github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask: github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask: "entry_point": "since_version": |- - v0.228.1 + v0.229.0 "named_parameters": "since_version": |- - v0.228.1 + v0.229.0 "package_name": "since_version": |- - v0.228.1 + v0.229.0 "parameters": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings: "enabled": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask: "dbt_commands": "since_version": |- - v0.228.1 + v0.229.0 "jar_params": "since_version": |- - v0.228.1 + v0.229.0 "job_id": "since_version": |- - v0.228.1 + v0.229.0 "job_parameters": "since_version": |- - v0.228.1 + v0.229.0 "notebook_params": "since_version": |- - v0.228.1 + v0.229.0 "pipeline_params": "since_version": |- - v0.228.1 + v0.229.0 "python_named_params": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "python_params": "since_version": |- - v0.228.1 + v0.229.0 "spark_submit_params": "since_version": |- - v0.228.1 + v0.229.0 "sql_params": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask: "jar_uri": "since_version": |- - v0.228.1 + v0.229.0 "main_class_name": "since_version": |- - v0.228.1 + v0.229.0 "parameters": "since_version": |- - v0.228.1 + v0.229.0 "run_as_repl": "since_version": |- v0.240.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask: "parameters": "since_version": |- - v0.228.1 + v0.229.0 "python_file": "since_version": |- - v0.228.1 + v0.229.0 "source": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask: "parameters": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTask: "alert": "since_version": |- - v0.228.1 + v0.229.0 "dashboard": "since_version": |- - v0.228.1 + v0.229.0 "file": "since_version": |- - v0.228.1 + v0.229.0 "parameters": "since_version": |- - v0.228.1 + v0.229.0 "query": "since_version": |- - v0.228.1 + v0.229.0 "warehouse_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert: "alert_id": "since_version": |- - v0.228.1 + v0.229.0 "pause_subscriptions": "since_version": |- - v0.228.1 + v0.229.0 "subscriptions": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard: "custom_subject": "since_version": |- - v0.228.1 + v0.229.0 "dashboard_id": "since_version": |- - v0.228.1 + v0.229.0 "pause_subscriptions": "since_version": |- - v0.228.1 + v0.229.0 "subscriptions": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile: "path": "since_version": 
|- - v0.228.1 + v0.229.0 "source": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery: "query_id": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription: "destination_id": "since_version": |- - v0.228.1 + v0.229.0 "user_name": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Subscription: "custom_subject": "since_version": |- @@ -1935,23 +1935,23 @@ github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber: github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration: "condition": "since_version": |- - v0.228.1 + v0.229.0 "min_time_between_triggers_seconds": "since_version": |- - v0.228.1 + v0.229.0 "table_names": "since_version": |- - v0.228.1 + v0.229.0 "wait_after_last_change_seconds": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Task: "clean_rooms_notebook_task": "since_version": |- v0.237.0 "condition_task": "since_version": |- - v0.228.1 + v0.229.0 "dashboard_task": "since_version": |- v0.248.0 @@ -1965,31 +1965,31 @@ github.com/databricks/databricks-sdk-go/service/jobs.Task: v0.257.0 "dbt_task": "since_version": |- - v0.228.1 + v0.229.0 "depends_on": "since_version": |- - v0.228.1 + v0.229.0 "description": "since_version": |- - v0.228.1 + v0.229.0 "disable_auto_optimization": "since_version": |- - v0.228.1 + v0.229.0 "disabled": "since_version": |- v0.271.0 "email_notifications": "since_version": |- - v0.228.1 + v0.229.0 "environment_key": "since_version": |- - v0.228.1 + v0.229.0 "existing_cluster_id": "since_version": |- - v0.228.1 + v0.229.0 "for_each_task": "since_version": |- - v0.228.1 + v0.229.0 "gen_ai_compute_task": "description": |- PLACEHOLDER @@ -1999,107 +1999,107 @@ github.com/databricks/databricks-sdk-go/service/jobs.Task: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "job_cluster_key": "since_version": |- - v0.228.1 + v0.229.0 "libraries": "since_version": |- - v0.228.1 + v0.229.0 "max_retries": "since_version": |- - v0.228.1 + v0.229.0 "min_retry_interval_millis": "since_version": |- - v0.228.1 + v0.229.0 "new_cluster": "since_version": |- - v0.228.1 + v0.229.0 "notebook_task": "since_version": |- - v0.228.1 + v0.229.0 "notification_settings": "since_version": |- - v0.228.1 + v0.229.0 "pipeline_task": "since_version": |- - v0.228.1 + v0.229.0 "power_bi_task": "since_version": |- v0.248.0 "python_wheel_task": "since_version": |- - v0.228.1 + v0.229.0 "retry_on_timeout": "since_version": |- - v0.228.1 + v0.229.0 "run_if": "since_version": |- - v0.228.1 + v0.229.0 "run_job_task": "since_version": |- - v0.228.1 + v0.229.0 "spark_jar_task": "since_version": |- - v0.228.1 + v0.229.0 "spark_python_task": "since_version": |- - v0.228.1 + v0.229.0 "spark_submit_task": "since_version": |- - v0.228.1 + v0.229.0 "sql_task": "since_version": |- - v0.228.1 + v0.229.0 "task_key": "since_version": |- - v0.228.1 + v0.229.0 "timeout_seconds": "since_version": |- - v0.228.1 + v0.229.0 "webhook_notifications": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency: "outcome": "since_version": |- - v0.228.1 + v0.229.0 "task_key": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications: "no_alert_for_skipped_runs": "since_version": |- - v0.228.1 + v0.229.0 
"on_duration_warning_threshold_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_failure": "since_version": |- - v0.228.1 + v0.229.0 "on_start": "since_version": |- - v0.228.1 + v0.229.0 "on_streaming_backlog_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_success": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings: "alert_on_last_attempt": "since_version": |- - v0.228.1 + v0.229.0 "no_alert_for_canceled_runs": "since_version": |- - v0.228.1 + v0.229.0 "no_alert_for_skipped_runs": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings: "file_arrival": "since_version": |- - v0.228.1 + v0.229.0 "model": "description": |- PLACEHOLDER @@ -2107,51 +2107,51 @@ github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings: v0.279.0 "pause_status": "since_version": |- - v0.228.1 + v0.229.0 "periodic": "since_version": |- - v0.228.1 + v0.229.0 "table_update": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Webhook: "id": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications: "on_duration_warning_threshold_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_failure": "since_version": |- - v0.228.1 + v0.229.0 "on_start": "since_version": |- - v0.228.1 + v0.229.0 "on_streaming_backlog_exceeded": "since_version": |- - v0.228.1 + v0.229.0 "on_success": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag: "key": "since_version": |- - v0.228.1 + v0.229.0 "value": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/ml.ModelTag: "key": "since_version": |- - v0.228.1 + v0.229.0 "value": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters: "source_catalog": "since_version": |- @@ -2161,12 +2161,12 @@ github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "timezone_id": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec: "catalog": "since_version": |- @@ -2180,28 +2180,28 @@ github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec: github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary: "path": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.Filters: "exclude": "since_version": |- - v0.228.1 + v0.229.0 "include": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig: "report": "since_version": |- v0.231.0 "schema": "since_version": |- - v0.228.1 + v0.229.0 "table": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition: "connection_id": "since_version": |- - v0.228.1 + v0.229.0 "connection_name": "since_version": |- v0.234.0 @@ -2210,23 +2210,23 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipeli v0.279.0 "gateway_storage_catalog": "since_version": |- - v0.228.1 + v0.229.0 "gateway_storage_name": "since_version": |- - v0.228.1 + v0.229.0 "gateway_storage_schema": "since_version": |- - v0.228.1 + v0.229.0 
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition: "connection_name": "since_version": |- - v0.228.1 + v0.229.0 "ingest_from_uc_foreign_catalog": "since_version": |- v0.279.0 "ingestion_gateway_id": "since_version": |- - v0.228.1 + v0.229.0 "netsuite_jar_path": "description": |- PLACEHOLDER @@ -2234,13 +2234,13 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin v0.271.0 "objects": "since_version": |- - v0.228.1 + v0.229.0 "source_configurations": "since_version": |- v0.267.0 "table_configuration": "since_version": |- - v0.228.1 + v0.229.0 ? github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig : "cursor_columns": "since_version": |- @@ -2271,14 +2271,14 @@ github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefin github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary: "path": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.Notifications: "alerts": "since_version": |- - v0.228.1 + v0.229.0 "email_recipients": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern: "include": "since_version": |- @@ -2286,97 +2286,97 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern: github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster: "apply_policy_default_values": "since_version": |- - v0.228.1 + v0.229.0 "autoscale": "since_version": |- - v0.228.1 + v0.229.0 "aws_attributes": "since_version": |- - v0.228.1 + v0.229.0 "azure_attributes": "since_version": |- - v0.228.1 + v0.229.0 "cluster_log_conf": "since_version": |- - v0.228.1 + v0.229.0 "custom_tags": "since_version": |- - v0.228.1 + v0.229.0 "driver_instance_pool_id": "since_version": |- - v0.228.1 + v0.229.0 "driver_node_type_id": "since_version": |- - v0.228.1 + v0.229.0 "enable_local_disk_encryption": "since_version": |- - v0.228.1 + v0.229.0 "gcp_attributes": "since_version": |- - v0.228.1 + v0.229.0 "init_scripts": "since_version": |- - v0.228.1 + v0.229.0 "instance_pool_id": "since_version": |- - v0.228.1 + v0.229.0 "label": "since_version": |- - v0.228.1 + v0.229.0 "node_type_id": "since_version": |- - v0.228.1 + v0.229.0 "num_workers": "since_version": |- - v0.228.1 + v0.229.0 "policy_id": "since_version": |- - v0.228.1 + v0.229.0 "spark_conf": "since_version": |- - v0.228.1 + v0.229.0 "spark_env_vars": "since_version": |- - v0.228.1 + v0.229.0 "ssh_public_keys": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale: "max_workers": "since_version": |- - v0.228.1 + v0.229.0 "min_workers": "since_version": |- - v0.228.1 + v0.229.0 "mode": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineDeployment: "kind": "since_version": |- - v0.228.1 + v0.229.0 "metadata_file_path": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary: "file": "since_version": |- - v0.228.1 + v0.229.0 "glob": "since_version": |- v0.252.0 "jar": "since_version": |- - v0.228.1 + v0.229.0 "maven": "since_version": |- - v0.228.1 + v0.229.0 "notebook": "since_version": |- - v0.228.1 + v0.229.0 "whl": "since_version": |- - v0.228.1 + v0.229.0 "deprecation_message": |- This field is deprecated 
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: @@ -2384,12 +2384,12 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "manual": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment: "dependencies": "since_version": |- @@ -2441,19 +2441,19 @@ github.com/databricks/databricks-sdk-go/service/pipelines.RunAs: github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec: "destination_catalog": "since_version": |- - v0.228.1 + v0.229.0 "destination_schema": "since_version": |- - v0.228.1 + v0.229.0 "source_catalog": "since_version": |- - v0.228.1 + v0.229.0 "source_schema": "since_version": |- - v0.228.1 + v0.229.0 "table_configuration": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig: "postgres": "since_version": |- @@ -2468,25 +2468,25 @@ github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig: github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec: "destination_catalog": "since_version": |- - v0.228.1 + v0.229.0 "destination_schema": "since_version": |- - v0.228.1 + v0.229.0 "destination_table": "since_version": |- - v0.228.1 + v0.229.0 "source_catalog": "since_version": |- - v0.228.1 + v0.229.0 "source_schema": "since_version": |- - v0.228.1 + v0.229.0 "source_table": "since_version": |- - v0.228.1 + v0.229.0 "table_configuration": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: "exclude_columns": "since_version": |- @@ -2496,16 +2496,16 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: v0.251.0 "primary_keys": "since_version": |- - v0.228.1 + v0.229.0 "query_based_connector_config": "since_version": |- v0.264.0 "salesforce_include_formula_fields": "since_version": |- - v0.228.1 + v0.229.0 "scd_type": "since_version": |- - v0.228.1 + v0.229.0 "sequence_by": "since_version": |- v0.231.0 @@ -2517,10 +2517,10 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: "ai21labs_api_key": "since_version": |- - v0.228.1 + v0.229.0 "ai21labs_api_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig: "fallback_config": "since_version": |- @@ -2597,32 +2597,32 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingCo github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig: "aws_access_key_id": "since_version": |- - v0.228.1 + v0.229.0 "aws_access_key_id_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "aws_region": "since_version": |- - v0.228.1 + v0.229.0 "aws_secret_access_key": "since_version": |- - v0.228.1 + v0.229.0 "aws_secret_access_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "bedrock_provider": "since_version": |- - v0.228.1 + v0.229.0 "instance_profile_arn": "since_version": |- v0.243.0 github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig: "anthropic_api_key": "since_version": |- - v0.228.1 + v0.229.0 "anthropic_api_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth: "key": "since_version": |- @@ -2636,16 +2636,16 @@ 
github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth: github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput: "catalog_name": "since_version": |- - v0.228.1 + v0.229.0 "enabled": "since_version": |- - v0.228.1 + v0.229.0 "schema_name": "since_version": |- - v0.228.1 + v0.229.0 "table_name_prefix": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth: "token": "since_version": |- @@ -2656,13 +2656,13 @@ github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth: github.com/databricks/databricks-sdk-go/service/serving.CohereConfig: "cohere_api_base": "since_version": |- - v0.228.1 + v0.229.0 "cohere_api_key": "since_version": |- - v0.228.1 + v0.229.0 "cohere_api_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig: "api_key_auth": "since_version": |- @@ -2676,13 +2676,13 @@ github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig: github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig: "databricks_api_token": "since_version": |- - v0.228.1 + v0.229.0 "databricks_api_token_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "databricks_workspace_url": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications: "on_update_failure": "since_version": |- @@ -2693,60 +2693,60 @@ github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications: github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput: "auto_capture_config": "since_version": |- - v0.228.1 + v0.229.0 "served_entities": "since_version": |- - v0.228.1 + v0.229.0 "served_models": "since_version": |- - v0.228.1 + v0.229.0 "traffic_config": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.EndpointTag: "key": "since_version": |- - v0.228.1 + v0.229.0 "value": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ExternalModel: "ai21labs_config": "since_version": |- - v0.228.1 + v0.229.0 "amazon_bedrock_config": "since_version": |- - v0.228.1 + v0.229.0 "anthropic_config": "since_version": |- - v0.228.1 + v0.229.0 "cohere_config": "since_version": |- - v0.228.1 + v0.229.0 "custom_provider_config": "since_version": |- v0.246.0 "databricks_model_serving_config": "since_version": |- - v0.228.1 + v0.229.0 "google_cloud_vertex_ai_config": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "openai_config": "since_version": |- - v0.228.1 + v0.229.0 "palm_config": "since_version": |- - v0.228.1 + v0.229.0 "provider": "since_version": |- - v0.228.1 + v0.229.0 "task": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig: "enabled": "since_version": |- @@ -2754,67 +2754,67 @@ github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig: github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig: "private_key": "since_version": |- - v0.228.1 + v0.229.0 "private_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "project_id": "since_version": |- - v0.228.1 + v0.229.0 "region": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig: "microsoft_entra_client_id": "since_version": |- - v0.228.1 + v0.229.0 "microsoft_entra_client_secret": 
"since_version": |- - v0.228.1 + v0.229.0 "microsoft_entra_client_secret_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "microsoft_entra_tenant_id": "since_version": |- - v0.228.1 + v0.229.0 "openai_api_base": "since_version": |- - v0.228.1 + v0.229.0 "openai_api_key": "since_version": |- - v0.228.1 + v0.229.0 "openai_api_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 "openai_api_type": "since_version": |- - v0.228.1 + v0.229.0 "openai_api_version": "since_version": |- - v0.228.1 + v0.229.0 "openai_deployment_name": "since_version": |- - v0.228.1 + v0.229.0 "openai_organization": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig: "palm_api_key": "since_version": |- - v0.228.1 + v0.229.0 "palm_api_key_plaintext": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.RateLimit: "calls": "since_version": |- - v0.228.1 + v0.229.0 "key": "since_version": |- - v0.228.1 + v0.229.0 "renewal_period": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.Route: "served_entity_name": "description": |- @@ -2823,103 +2823,103 @@ github.com/databricks/databricks-sdk-go/service/serving.Route: v0.260.0 "served_model_name": "since_version": |- - v0.228.1 + v0.229.0 "traffic_percentage": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: "entity_name": "since_version": |- - v0.228.1 + v0.229.0 "entity_version": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "environment_vars": "since_version": |- - v0.228.1 + v0.229.0 "external_model": "since_version": |- - v0.228.1 + v0.229.0 "instance_profile_arn": "since_version": |- - v0.228.1 + v0.229.0 "max_provisioned_concurrency": "since_version": |- v0.256.0 "max_provisioned_throughput": "since_version": |- - v0.228.1 + v0.229.0 "min_provisioned_concurrency": "since_version": |- v0.256.0 "min_provisioned_throughput": "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "provisioned_model_units": "since_version": |- v0.252.0 "scale_to_zero_enabled": "since_version": |- - v0.228.1 + v0.229.0 "workload_size": "since_version": |- - v0.228.1 + v0.229.0 "workload_type": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: "environment_vars": "since_version": |- - v0.228.1 + v0.229.0 "instance_profile_arn": "since_version": |- - v0.228.1 + v0.229.0 "max_provisioned_concurrency": "since_version": |- v0.256.0 "max_provisioned_throughput": "since_version": |- - v0.228.1 + v0.229.0 "min_provisioned_concurrency": "since_version": |- v0.256.0 "min_provisioned_throughput": "since_version": |- - v0.228.1 + v0.229.0 "model_name": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "model_version": "description": |- PLACEHOLDER "since_version": |- - v0.228.1 + v0.229.0 "name": "since_version": |- - v0.228.1 + v0.229.0 "provisioned_model_units": "since_version": |- v0.252.0 "scale_to_zero_enabled": "since_version": |- - v0.228.1 + v0.229.0 "workload_size": "since_version": |- - v0.228.1 + v0.229.0 "workload_type": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig: "routes": "since_version": |- - v0.228.1 + v0.229.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation: "comparison_operator": "since_version": |- diff --git 
a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index dc1a939e02..f7b3be1b55 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -3619,17 +3619,17 @@ "pause_status": { "description": "Read only field that indicates whether a schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "quartz_cron_expression": { "description": "The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timezone_id": { "description": "The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -3670,7 +3670,7 @@ "enabled": { "description": "Whether to enable data classification.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -3689,7 +3689,7 @@ "email_addresses": { "description": "The list of email addresses to send the notification to. A maximum of 5 email addresses is supported.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -3708,37 +3708,37 @@ "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "label_col": { "description": "Column for the label.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "model_id_col": { "description": "Column for the model identifier.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "prediction_col": { "description": "Column for the prediction.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "prediction_proba_col": { "description": "Column for prediction probabilities", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "problem_type": { "description": "Problem type the model aims to solve.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timestamp_col": { "description": "Column for the timestamp.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -3780,27 +3780,27 @@ "definition": { "description": "Jinja template for a SQL expression that specifies how to compute the metric. 
See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "input_columns": { "description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name": { "description": "Name of the metric in the output tables.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "output_data_type": { "description": "The output type of the custom metric.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "type": { "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -3843,14 +3843,14 @@ "on_failure": { "description": "Destinations to send notifications on failure/timeout.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_new_classification_tag_detected": { "description": "Destinations to send notifications on new classification tag detected.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -3883,12 +3883,12 @@ "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timestamp_col": { "description": "Column for the timestamp.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -3967,7 +3967,7 @@ "destination": { "description": "abfss destination, e.g. 
`abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -3989,12 +3989,12 @@ "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded.\nNote that `max_workers` must be strictly greater than `min_workers`.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "min_workers": { "description": "The minimum number of workers to which the cluster can scale down when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4013,51 +4013,51 @@ "properties": { "availability": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ebs_volume_count": { "description": "The number of volumes launched for each instance. Users can choose up to 10 volumes.\nThis feature is only enabled for supported node types. Legacy node types cannot specify\ncustom EBS volumes.\nFor node types with no instance store, at least one EBS volume needs to be specified;\notherwise, cluster creation will fail.\n\nThese EBS volumes will be mounted at `/ebs0`, `/ebs1`, and etc.\nInstance store volumes will be mounted at `/local_disk0`, `/local_disk1`, and etc.\n\nIf EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for\nscratch storage because heterogenously sized scratch devices can lead to inefficient disk\nutilization. If no EBS volumes are attached, Databricks will configure Spark to use instance\nstore volumes.\n\nPlease note that if EBS volumes are specified, then the Spark configuration `spark.local.dir`\nwill be overridden.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ebs_volume_iops": { "description": "If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ebs_volume_size": { "description": "The size of each EBS volume (in GiB) launched for each instance. For general purpose\nSSD, this value must be within the range 100 - 4096. For throughput optimized HDD,\nthis value must be within the range 500 - 4096.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ebs_volume_throughput": { "description": "If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ebs_volume_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nIf this value is greater than 0, the cluster driver node in particular will be placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. 
If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "instance_profile_arn": { "description": "Nodes for this cluster will only be placed on AWS instances with this instance profile. If\nommitted, nodes will be placed on instances without an IAM instance profile. The instance\nprofile must have previously been added to the Databricks environment by an account\nadministrator.\n\nThis feature may only be available to certain customer plans.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spot_bid_price_percent": { "description": "The bid price for AWS spot instances, as a percentage of the corresponding instance type's\non-demand price.\nFor example, if this field is set to 50, and the cluster needs a new `r3.xlarge` spot\ninstance, then the bid price is half of the price of\non-demand `r3.xlarge` instances. Similarly, if this field is set to 200, the bid price is twice\nthe price of on-demand `r3.xlarge` instances. If not specified, the default value is 100.\nWhen spot instances are requested for this cluster, only spot instances whose bid price\npercentage matches this field will be considered.\nNote that, for safety, we enforce this field to be no more than 10000.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "zone_id": { "description": "Identifier for the availability zone/datacenter in which the cluster resides.\nThis string will be of a form like \"us-west-2a\". The provided availability\nzone must be in the same region as the Databricks deployment. For example, \"us-west-2a\"\nis not a valid zone id if the Databricks deployment resides in the \"us-east-1\" region.\nThis is an optional field at cluster creation, and if not specified, the zone \"auto\" will be used.\nIf the zone specified is \"auto\", will try to place cluster in a zone with high availability,\nand will retry placement in a different AZ if there is not enough capacity.\n\nThe list of available zones as well as the default value can be found by using the\n`List Zones` method.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4093,22 +4093,22 @@ "properties": { "availability": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. 
Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "log_analytics_info": { "description": "Defines values necessary to configure and run Azure Log Analytics agent", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spot_bid_max_price": { "description": "The max bid price to be used for Azure spot instances.\nThe Max price for the bid cannot be higher than the on-demand price of the instance.\nIf not specified, the default value is -1, which specifies that the instance cannot be evicted\non the basis of price, and only on the basis of availability. Further, the value should be \u003e 0 or -1.", "$ref": "#/$defs/float64", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4144,12 +4144,12 @@ "jobs": { "description": "With jobs set, the cluster can be used for jobs", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notebooks": { "description": "With notebooks set, this cluster can be used for notebooks", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4169,12 +4169,12 @@ "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "s3": { "description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "volumes": { "description": "destination needs to be provided, e.g.\n`{ \"volumes\": { \"destination\": \"/Volumes/catalog/schema/volume/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", @@ -4199,85 +4199,85 @@ "apply_policy_default_values": { "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "autotermination_minutes": { "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. 
If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster_name": { "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. 
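The ClusterLogConf entries above allow exactly one destination per cluster. A sketch using the S3 variant quoted in the description; bucket and region are illustrative:

```yaml
# Hypothetical sketch: log delivery plus auto-termination, per the fields above.
new_cluster:
  autotermination_minutes: 60   # 10-10000 when set; 0 disables auto-termination
  cluster_log_conf:
    s3:
      destination: s3://cluster_log_bucket/prefix
      region: us-west-2         # region or endpoint is required; endpoint wins if both are set
```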
Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "data_security_mode": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "docker_image": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "enable_elastic_disk": { "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "enable_local_disk_encryption": { "description": "Whether to enable LUKS on cluster VMs' local disks", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "init_scripts": { "description": "The configuration for storing init scripts. Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "is_single_node": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`", "$ref": "#/$defs/bool", @@ -4291,17 +4291,17 @@ "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. 
A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "remote_disk_throughput": { "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", @@ -4310,32 +4310,32 @@ }, "runtime_engine": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "single_user_name": { "description": "Single user name if data_security_mode is `SINGLE_USER`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault Databricks-managed environment variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_version": { "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. 
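Taken together, the ClusterSpec fields above form the core of a cluster definition. A sketch with illustrative values; the SPARK_DAEMON_JAVA_OPTS idiom is the one quoted in the spark_env_vars description:

```yaml
# Hypothetical sketch of the core ClusterSpec fields documented above.
new_cluster:
  spark_version: 15.4.x-scala2.12   # list via :method:clusters/sparkVersions
  node_type_id: m5.xlarge           # list via :method:clusters/listNodeTypes
  num_workers: 4                    # 4 executors + 1 driver = 5 Spark nodes
  spark_conf:
    spark.sql.shuffle.partitions: "200"
  spark_env_vars:
    SPARK_DAEMON_JAVA_OPTS: "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"
```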
The\ncorresponding private keys can be used to log in with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "total_initial_remote_disk_size": { "description": "If set, what the total initial volume size (in GB) of the remote disks should be. Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", @@ -4349,7 +4349,7 @@ }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4393,7 +4393,7 @@ "destination": { "description": "dbfs destination, e.g. `dbfs:/my/path`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4415,12 +4415,12 @@ "password": { "description": "Password of the user", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "username": { "description": "Name of the user", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4438,12 +4438,12 @@ "properties": { "basic_auth": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "url": { "description": "URL of the docker image.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4480,13 +4480,13 @@ "description": "Use `environment_version` instead.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "environment_version": { "description": "Required. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", "$ref": "#/$defs/string", @@ -4514,12 +4514,12 @@ "properties": { "availability": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "boot_disk_size": { "description": "Boot disk size in GB", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", "$ref": "#/$defs/int", @@ -4529,24 +4529,24 @@ "google_service_account": { "description": "If provided, the cluster will impersonate the google service account when accessing\ngcloud services (like GCS). 
The google service account\nmust have previously been added to the Databricks environment by an account\nadministrator.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "local_ssd_count": { "description": "If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached.\nEach local SSD is 375GB in size.\nRefer to [GCP documentation](https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds)\nfor the supported number of local SSDs for each instance type.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "use_preemptible_executors": { "description": "This field determines whether the spark executors will be scheduled to run on preemptible\nVMs (when set to true) versus standard compute engine VMs (when set to false; default).\nNote: Soon to be deprecated, use the 'availability' field instead.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "zone_id": { "description": "Identifier for the availability zone in which the cluster resides.\nThis can be one of the following:\n- \"HA\" =\u003e High availability, spread nodes across availability zones for a Databricks deployment region [default].\n- \"AUTO\" =\u003e Databricks picks an availability zone to schedule the cluster on.\n- A GCP availability zone =\u003e Pick One of the available zones for (machine type + region) from\nhttps://cloud.google.com/compute/docs/regions-zones.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4583,7 +4583,7 @@ "destination": { "description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4606,39 +4606,39 @@ "abfss": { "description": "Contains the Azure Data Lake Storage destination path", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\": { \"destination\" : \"dbfs:/home/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "file": { "description": "destination needs to be provided, e.g.\n`{ \"file\": { \"destination\": \"file:/my/local/file.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gcs": { "description": "destination needs to be provided, e.g.\n`{ \"gcs\": { \"destination\": \"gs://my-bucket/file.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "s3": { "description": "destination and either the region or endpoint need to be provided. 
e.g.\n`{ \\\"s3\\\": { \\\"destination\\\": \\\"s3://cluster_log_bucket/prefix\\\", \\\"region\\\": \\\"us-west-2\\\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "volumes": { "description": "destination needs to be provided. e.g.\n`{ \\\"volumes\\\" : { \\\"destination\\\" : \\\"/Volumes/my-init.sh\\\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "workspace": { "description": "destination needs to be provided, e.g.\n`{ \"workspace\": { \"destination\": \"/cluster-init-scripts/setup-datadog.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4671,39 +4671,39 @@ "cran": { "description": "Specification of a CRAN library to be installed as part of the library", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "egg": { "description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "jar": { "description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "maven": { "description": "Specification of a maven library to be installed. For example:\n`{ \"coordinates\": \"org.jsoup:jsoup:1.7.2\" }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pypi": { "description": "Specification of a PyPi library to be installed. For example:\n`{ \"package\": \"simplejson\" }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "requirements": { "description": "URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported.\nFor example: `{ \"requirements\": \"/Workspace/path/to/requirements.txt\" }` or `{ \"requirements\" : \"/Volumes/path/to/requirements.txt\" }`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "whl": { "description": "URI of the wheel library to install. 
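The storage and library variants above are mutually exclusive per entry: each init script names one destination type, and each library names one of cran/egg/jar/maven/pypi/requirements/whl. A sketch combining them; the paths are illustrative, and the maven and pypi values echo the examples in the descriptions:

```yaml
# Hypothetical sketch of init_scripts and libraries, per the schema entries above.
new_cluster:
  init_scripts:
    - volumes:
        destination: /Volumes/catalog/schema/volume/setup.sh
tasks:
  - task_key: main
    libraries:
      - maven:
          coordinates: org.jsoup:jsoup:1.7.2
      - pypi:
          package: simplejson==3.8.0        # exact version pins are allowed
      - whl: /Workspace/path/to/library.whl
```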
Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"whl\": \"/Workspace/path/to/library.whl\" }`, `{ \"whl\" : \"/Volumes/path/to/library.whl\" }` or\n`{ \"whl\": \"s3://my-bucket/library.whl\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4722,7 +4722,7 @@ "destination": { "description": "local file destination, e.g. `file:/my/local/file.sh`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4744,12 +4744,12 @@ "log_analytics_primary_key": { "description": "The primary key for the Azure Log Analytics agent configuration", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "log_analytics_workspace_id": { "description": "The workspace ID for the Azure Log Analytics agent configuration", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -4768,17 +4768,17 @@ "coordinates": { "description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "exclusions": { "description": "List of dependencies to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`.\n\nMaven dependency exclusions:\nhttps://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "repo": { "description": "Maven repo to install the Maven package from. If omitted, both Maven Central Repository\nand Spark Packages are searched.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4800,12 +4800,12 @@ "package": { "description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "repo": { "description": "The repository where the package can be found. If not specified, the default pip index is\nused.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4827,12 +4827,12 @@ "package": { "description": "The name of the CRAN package to install.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "repo": { "description": "The repository where the package can be found. If not specified, the default CRAN repo is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4871,37 +4871,37 @@ "canned_acl": { "description": "(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`.\nIf `canned_acl` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on\nthe destination bucket and prefix. The full list of possible canned ACLs can be found at\nhttp://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl.\nPlease also note that by default only the object owner gets full control. 
If you are using a cross-account\nrole for writing data, you may want to set `bucket-owner-full-control` to make the bucket owner able to\nread the logs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "destination": { "description": "S3 destination, e.g. `s3://my-bucket/some-prefix`. Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "enable_encryption": { "description": "(Optional) Flag to enable server side encryption, `false` by default.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "encryption_type": { "description": "(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when\nencryption is enabled and the default type is `sse-s3`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "endpoint": { "description": "S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set.\nIf both are set, endpoint will be used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "kms_key": { "description": "(Optional) KMS key which will be used if encryption is enabled and encryption type is set to `sse-kms`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "region": { "description": "S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set,\nendpoint will be used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4924,7 +4924,7 @@ "destination": { "description": "UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4947,7 +4947,7 @@ "clients": { "description": "Defines what type of clients can use the cluster, e.g. Notebooks, Jobs", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -4970,7 +4970,7 @@ "destination": { "description": "wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5461,17 +5461,17 @@ "left": { "description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "op": { "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. 
If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "right": { "description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5515,7 +5515,7 @@ "pause_status": { "description": "Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "task_retry_mode": { "description": "Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.", @@ -5539,17 +5539,17 @@ "pause_status": { "description": "Indicate whether this schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timezone_id": { "description": "A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5649,37 +5649,37 @@ "catalog": { "description": "Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks \u003e= 1.1.1.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "commands": { "description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "profiles_directory": { "description": "Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "project_directory": { "description": "Path to the project directory. Optional for Git sourced tasks, in which\ncase if no value is provided, the root of the Git repository is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "schema": { "description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source": { "description": "Optional location type of the project directory. 
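CronSchedule and DbtTask above compose into a scheduled dbt job. A sketch with illustrative values; the warehouse variable is hypothetical, and every command must start with `dbt`:

```yaml
# Hypothetical sketch: a cron-scheduled job running dbt, per the fields above.
resources:
  jobs:
    dbt_job:
      schedule:
        quartz_cron_expression: "0 0 6 * * ?"   # Quartz syntax; required
        timezone_id: UTC                        # Java timezone ID; required
      tasks:
        - task_key: dbt
          dbt_task:
            commands:
              - dbt deps
              - dbt run
            warehouse_id: ${var.warehouse_id}   # profile is generated automatically when set
```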
When set to `WORKSPACE`, the project will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in Databricks workspace.\n* `GIT`: Project is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5701,17 +5701,17 @@ "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "url": { "description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.\nThis makes it possible to wait for a batch of incoming files to arrive before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5733,17 +5733,17 @@ "concurrency": { "description": "An optional maximum allowed number of concurrent runs of the task.\nSet this value if you want to be able to execute multiple runs of the task concurrently.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "inputs": { "description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "task": { "description": "Configuration for the task that will be run for each element in the array", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5859,7 +5859,7 @@ "used_commit": { "description": "Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -5879,27 +5879,27 @@ "git_branch": { "description": "Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git_commit": { "description": "Commit to be checked out and used by this job. 
This field cannot be specified in conjunction with git_branch or git_tag.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git_provider": { "description": "Unique identifier of the service used to host the Git repository. The value is case insensitive.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git_tag": { "description": "Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "git_url": { "description": "URL of the repository to be cloned by this job.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5922,12 +5922,12 @@ "job_cluster_key": { "description": "A unique name for the job cluster. This field is required and must be unique within the job.\n`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "new_cluster": { "description": "If new_cluster, a description of a cluster that is created for each task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -5950,12 +5950,12 @@ "kind": { "description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "metadata_file_path": { "description": "Path of the file that contains deployment metadata.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6009,33 +6009,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. 
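The GitSource fields above select exactly one of branch, tag, or commit. A sketch; the URL and branch are illustrative:

```yaml
# Hypothetical sketch of git_source, per the schema entries above.
resources:
  jobs:
    example_job:
      git_source:
        git_url: https://github.com/example-org/example-repo
        git_provider: gitHub
        git_branch: main   # mutually exclusive with git_tag and git_commit
```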
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -6054,11 +6054,11 @@ "environment_key": { "description": "The key of an environment. It has to be unique within a job.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spec": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6080,12 +6080,12 @@ "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -6104,12 +6104,12 @@ "default": { "description": "Default value of the parameter.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name": { "description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6156,17 +6156,17 @@ "dirty_state": { "description": "Dirty state indicates the job is not fully synced with the job specification in the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. 
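The notification and parameter definitions above attach at the job level. A sketch; addresses and parameter names are illustrative:

```yaml
# Hypothetical sketch of email notifications and job parameters, per the fields above.
resources:
  jobs:
    example_job:
      email_notifications:
        on_failure:
          - oncall@example.com
      notification_settings:
        no_alert_for_canceled_runs: true
      parameters:
        - name: run_date          # alphanumerics, `_`, `-`, and `.` only
          default: "2026-01-01"
```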
Import the remote job specification again from UI to make the job fully synced.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "import_from_git_branch": { "description": "Name of the branch which the job is imported from.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "job_config_path": { "description": "Path of the job YAML file that contains the job specification.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6238,16 +6238,16 @@ "properties": { "metric": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "op": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "value": { "description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.", "$ref": "#/$defs/int64", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6271,7 +6271,7 @@ "properties": { "rules": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -6348,22 +6348,22 @@ "base_parameters": { "description": "Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run\nNow with parameters specified, the two parameters maps are merged. If the same key is specified in\n`base_parameters` and in `run-now`, the value from `run-now` is used.\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nIf the notebook takes a parameter that is not specified in the job’s `base_parameters` or the `run-now` override parameters,\nthe default value from the notebook is used.\n\nRetrieve these parameters in a notebook using [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets).\n\nThe JSON representation of this field cannot exceed 1MB.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notebook_path": { "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source": { "description": "Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository\ndefined in `git_source`. 
If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Notebook is located in Databricks workspace.\n* `GIT`: Notebook is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "warehouse_id": { "description": "Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.\n\nNote that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6416,12 +6416,12 @@ "interval": { "description": "The interval at which the trigger should run.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "unit": { "description": "The unit of time for the interval.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6460,7 +6460,7 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -6479,12 +6479,12 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pipeline_id": { "description": "The full name of the pipeline task to execute.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6618,22 +6618,22 @@ "entry_point": { "description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "named_parameters": { "description": "Command-line parameters passed to Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "package_name": { "description": "Name of the package to execute", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "parameters": { "description": "Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6656,7 +6656,7 @@ "enabled": { "description": "If true, enable queueing for the job. 
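NotebookTask and PythonWheelTask above are alternative task payloads. A sketch of each; the paths, package name, and parameters are illustrative:

```yaml
# Hypothetical sketch of notebook and Python wheel tasks, per the fields above.
tasks:
  - task_key: etl_notebook
    notebook_task:
      notebook_path: /Workspace/notebooks/etl   # absolute for workspace notebooks
      base_parameters:
        env: dev                                # run-now values override these
  - task_key: wheel
    python_wheel_task:
      package_name: my_package
      entry_point: main
      parameters: ["--mode=batch"]              # leave empty if named_parameters is used
```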
This is a required field.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6701,7 +6701,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "jar_params": { @@ -6710,18 +6710,18 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "job_id": { "description": "ID of the job to trigger.", "$ref": "#/$defs/int64", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "job_parameters": { "description": "Job-level parameters used to trigger the job.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notebook_params": { "description": "A map from keys to values for jobs with notebook task, for example `\"notebook_params\": {\"name\": \"john doe\", \"age\": \"35\"}`.\nThe map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function.\n\nIf not specified upon `run-now`, the triggered run uses the job’s base parameters.\n\nnotebook_params cannot be specified in conjunction with jar_params.\n\n⚠ **Deprecation note** Use [job parameters](https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown) to pass information down to tasks.\n\nThe JSON representation of this field (for example `{\"notebook_params\":{\"name\":\"john doe\",\"age\":\"35\"}}`) cannot exceed 10,000 bytes.", @@ -6729,20 +6729,20 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "pipeline_params": { "description": "Controls whether the pipeline should perform a full refresh", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "python_named_params": { "$ref": "#/$defs/map/string", "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "python_params": { @@ -6751,7 +6751,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "spark_submit_params": { @@ -6760,7 +6760,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "sql_params": { @@ -6769,7 +6769,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true } }, @@ -6809,18 +6809,18 @@ "description": "Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field instead. 
For serverless compute, provide a `jar` through the `java_dependencies` field inside the `environments` list.\n\nSee the examples of classic and serverless compute usage at the top of the page.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "main_class_name": { "description": "The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library.\n\nThe code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "parameters": { "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_as_repl": { "description": "Deprecated. A value of `false` is no longer supported.", "$ref": "#/$defs/bool", @@ -6846,17 +6846,17 @@ "parameters": { "description": "Command line parameters passed to the Python file.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "python_file": { "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source": { "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local\nDatabricks workspace or cloud location (if the `python_file` has a URI format). 
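SparkJarTask and SparkPythonTask above follow the same pattern. A sketch; the class name, paths, and volume path are illustrative:

```yaml
# Hypothetical sketch of JAR and Python script tasks, per the fields above.
tasks:
  - task_key: jar
    spark_jar_task:
      main_class_name: com.example.Main   # must call SparkContext.getOrCreate
      parameters: ["--date", "2026-01-14"]
    libraries:
      - jar: /Volumes/catalog/schema/volume/app.jar
  - task_key: script
    spark_python_task:
      python_file: /Workspace/scripts/job.py   # absolute workspace path
      source: WORKSPACE
```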
When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6878,7 +6878,7 @@ "parameters": { "description": "Command-line parameters passed to spark submit.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -6897,32 +6897,32 @@ "alert": { "description": "If alert, indicates that this job must refresh a SQL alert.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "dashboard": { "description": "If dashboard, indicates that this job must refresh a SQL dashboard.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "file": { "description": "If file, indicates that this job runs a SQL file in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "parameters": { "description": "Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "query": { "description": "If query, indicates that this job must execute a SQL query.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "warehouse_id": { "description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. 
Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6944,17 +6944,17 @@ "alert_id": { "description": "The canonical identifier of the SQL alert.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pause_subscriptions": { "description": "If true, the alert notifications are not sent to subscribers.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "subscriptions": { "description": "If specified, alert notifications are sent to subscribers.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -6976,22 +6976,22 @@ "custom_subject": { "description": "Subject of the email sent to subscribers of this task.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "dashboard_id": { "description": "The canonical identifier of the SQL dashboard.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pause_subscriptions": { "description": "If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "subscriptions": { "description": "If specified, dashboard snapshots are sent to subscriptions.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7013,12 +7013,12 @@ "path": { "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source": { "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7040,7 +7040,7 @@ "query_id": { "description": "The canonical identifier of the SQL query.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7062,12 +7062,12 @@ "destination_id": { "description": "The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "user_name": { "description": "The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. 
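The SqlTask variants above (alert, dashboard, file, query) are mutually exclusive. A sketch of the query variant; both identifiers are illustrative placeholders:

```yaml
# Hypothetical sketch of a sql_task running a saved query, per the fields above.
tasks:
  - task_key: refresh_metrics
    sql_task:
      warehouse_id: ${var.warehouse_id}   # serverless or pro warehouse recommended
      query:
        query_id: ${var.query_id}         # canonical identifier of the SQL query
      parameters:
        date: "2026-01-14"                # not supported by the alert variant
```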
You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7152,22 +7152,22 @@ "condition": { "description": "The table(s) condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_names": { "description": "A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no table updates have occurred for the specified time\nand can be used to wait for a series of table updates before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7194,7 +7194,7 @@ "condition_task": { "description": "The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present.\nThe condition task does not require a cluster to execute and does not support retries or notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "dashboard_task": { "description": "The task refreshes a dashboard and sends a snapshot to subscribers.", @@ -7219,22 +7219,22 @@ "dbt_task": { "description": "The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "depends_on": { "description": "An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true.\nThe key is `task_key`, and the value is the name assigned to the dependent task.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "description": { "description": "An optional description for this task.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "disable_auto_optimization": { "description": "An option to disable auto optimization in serverless", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "disabled": { "description": "An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.", @@ -7246,22 +7246,22 @@ "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. 
The default behavior is to not send any emails.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "environment_key": { "description": "The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "existing_cluster_id": { "description": "If existing_cluster_id, the ID of an existing cluster that is used for all runs.\nWhen running jobs or tasks on an existing cluster, you may need to manually restart\nthe cluster if it stops responding. We suggest running jobs and tasks on new clusters for\ngreater reliability", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "for_each_task": { "description": "The task executes a nested task for every input provided when the `for_each_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gen_ai_compute_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask", @@ -7271,47 +7271,47 @@ }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "job_cluster_key": { "description": "If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "libraries": { "description": "An optional list of libraries to be installed on the cluster.\nThe default value is an empty list.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "max_retries": { "description": "An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "min_retry_interval_millis": { "description": "An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. 
The default behavior is that unsuccessful runs are immediately retried.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "new_cluster": { "description": "If new_cluster, a description of a new cluster that is created for each run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notebook_task": { "description": "The task runs a notebook when the `notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "pipeline_task": { "description": "The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "power_bi_task": { "description": "The task triggers a Power BI semantic model update when the `power_bi_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask", @@ -7321,59 +7321,59 @@ "python_wheel_task": { "description": "The task runs a Python wheel when the `python_wheel_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "retry_on_timeout": { "description": "An optional policy to specify whether to retry a job when it times out. 
The default behavior\nis to not retry on timeout.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_if": { "description": "An optional value specifying the condition determining whether the task is run once its dependencies have been completed.\n\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: All dependencies have failed", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "run_job_task": { "description": "The task triggers another job when the `run_job_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_jar_task": { "description": "The task runs a JAR when the `spark_jar_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_python_task": { "description": "The task runs a Python file when the `spark_python_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_submit_task": { "description": "(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "sql_task": { "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "task_key": { "description": "A unique name for the task. This field is used to refer to this task from other tasks.\nThis field is required and must be unique within its parent job.\nOn Update or Reset, this field is used to reference the tasks to be updated or reset.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job task. A value of `0` means no timeout.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this task begin or complete. The default behavior is to not send any system notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7395,12 +7395,12 @@ "outcome": { "description": "Can only be specified on condition task dependencies. 
The outcome of the dependent task that must be met for this task to run.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "task_key": { "description": "The name of the task this task depends on.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7423,33 +7423,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7468,17 +7468,17 @@ "alert_on_last_attempt": { "description": "If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7513,7 +7513,7 @@ "file_arrival": { "description": "File arrival trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "model": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration", @@ -7524,16 +7524,16 @@ "pause_status": { "description": "Whether this trigger is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "periodic": { "description": "Periodic trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_update": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7551,7 +7551,7 @@ "properties": { "id": { "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7573,27 +7573,27 @@ "on_duration_warning_threshold_exceeded": { "description": "An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_failure": { "description": "An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_start": { "description": "An optional list of system notification IDs to call when the run starts. 
A maximum of 3 destinations can be specified for the `on_start` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7613,12 +7613,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7638,12 +7638,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7682,11 +7682,11 @@ "properties": { "quartz_cron_schedule": { "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "timezone_id": { "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7771,7 +7771,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7790,12 +7790,12 @@ "exclude": { "description": "Paths to exclude.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "include": { "description": "Paths to include.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7819,12 +7819,12 @@ "schema": { "description": "Select all tables from a specific source schema.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table": { "description": "Select a specific source table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -7844,7 +7844,7 @@ "description": "[Deprecated, use connection_name instead] Immutable. 
The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true }, "connection_name": { @@ -7862,17 +7862,17 @@ "gateway_storage_catalog": { "description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gateway_storage_name": { "description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nSpark Declarative Pipelines system will automatically create the storage location under the catalog and schema.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gateway_storage_schema": { "description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -7896,7 +7896,7 @@ "connection_name": { "description": "Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ingest_from_uc_foreign_catalog": { "description": "Immutable. If set to true, the pipeline will ingest tables from the\nUC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.\nThe `source_catalog` fields in objects of IngestionConfig are interpreted as\nthe UC foreign catalogs to ingest from.", @@ -7908,7 +7908,7 @@ "ingestion_gateway_id": { "description": "Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "netsuite_jar_path": { "$ref": "#/$defs/string", @@ -7919,7 +7919,7 @@ "objects": { "description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_configurations": { "description": "Top-level source configurations", @@ -7929,7 +7929,7 @@ "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -8081,7 +8081,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -8100,12 +8100,12 @@ "alerts": { "description": "A list of alerts that trigger the sending of notifications to the configured\ndestinations. 
The supported alerts are:\n\n* `on-update-success`: A pipeline update completes successfully.\n* `on-update-failure`: Each time a pipeline update fails.\n* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.\n* `on-flow-failure`: A single data flow fails.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "email_recipients": { "description": "A list of email addresses notified when a configured alert is triggered.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -8143,97 +8143,97 @@ "apply_policy_default_values": { "description": "Note: This field won't be persisted. Only API users will check this field.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nOnly dbfs destinations are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. 
Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "enable_local_disk_encryption": { "description": "Whether to enable local disk encryption for the cluster.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "init_scripts": { "description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "label": { "description": "A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. 
For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nSee :method:clusters/create for more details.", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -8252,17 +8252,17 @@ "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "min_workers": { "description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", "$ref": "#/$defs/int", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "mode": { "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. 
The legacy autoscaling feature is used for `maintenance`\nclusters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -8301,12 +8301,12 @@ "kind": { "description": "The deployment method that manages the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "metadata_file_path": { "description": "The path to the file containing metadata about the deployment.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -8328,7 +8328,7 @@ "file": { "description": "The path to a file that defines a pipeline and is stored in the Databricks Repos.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "glob": { "description": "The unified field to include source codes.\nEach entry can be a notebook path, a file path, or a folder path that ends `/**`.\nThis field cannot be used together with `notebook` or `file`.", @@ -8340,25 +8340,25 @@ "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "maven": { "description": "Specification of a maven library to be installed.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "notebook": { "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "whl": { "description": "URI of the whl to be installed.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.228.1", + "since_version": "v0.229.0", "deprecated": true } }, @@ -8377,11 +8377,11 @@ "properties": { "cron": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "manual": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -8566,27 +8566,27 @@ "destination_catalog": { "description": "Required. Destination catalog to store tables.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "destination_schema": { "description": "Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails If a table with the same name already exists.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_catalog": { "description": "The source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_schema": { "description": "Required. 
Schema name in the source database.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -8654,37 +8654,37 @@ "destination_catalog": { "description": "Required. Destination catalog to store table.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "destination_schema": { "description": "Required. Destination schema to store table.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "destination_table": { "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_catalog": { "description": "Source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_schema": { "description": "Schema name in the source database. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "source_table": { "description": "Required. Table name in the source database.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -8718,7 +8718,7 @@ "primary_keys": { "description": "The primary key of the table used to apply changes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "query_based_connector_config": { "description": "Configurations that are only applicable for query-based ingestion connectors.", @@ -8732,14 +8732,14 @@ "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "scd_type": { "description": "The SCD type to use to ingest the table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "sequence_by": { "description": "The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.", @@ -8786,12 +8786,12 @@ "ai21labs_api_key": { "description": "The Databricks secret key reference for an AI21 Labs API key. 
If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "ai21labs_api_key_plaintext": { "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9072,32 +9072,32 @@ "aws_access_key_id": { "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_access_key_id_plaintext": { "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_region": { "description": "The AWS region to use. Bedrock has to be enabled there.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_secret_access_key": { "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "aws_secret_access_key_plaintext": { "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "bedrock_provider": { "description": "The underlying provider in Amazon Bedrock. 
Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "instance_profile_arn": { "description": "ARN of the instance profile that the external model will use to access AWS resources.\nYou must authenticate using an instance profile or access keys.\nIf you prefer to authenticate using access keys, see `aws_access_key_id`,\n`aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.", @@ -9142,12 +9142,12 @@ "anthropic_api_key": { "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "anthropic_api_key_plaintext": { "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9198,22 +9198,22 @@ "catalog_name": { "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "enabled": { "description": "Indicates whether the inference table is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "schema_name": { "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9256,17 +9256,17 @@ "cohere_api_base": { "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cohere_api_key": { "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cohere_api_key_plaintext": { "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. 
You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9318,17 +9318,17 @@ "databricks_api_token": { "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "databricks_api_token_plaintext": { "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "databricks_workspace_url": { "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -9374,22 +9374,22 @@ "auto_capture_config": { "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "served_entities": { "description": "The list of served entities under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "served_models": { "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "traffic_config": { "description": "The traffic configuration associated with the serving endpoint config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9408,12 +9408,12 @@ "key": { "description": "Key field for a serving endpoint tag.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "value": { "description": "Optional value field for a serving endpoint tag.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -9435,22 +9435,22 @@ "ai21labs_config": { "description": "AI21Labs Config. 
Only required if the provider is 'ai21labs'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "amazon_bedrock_config": { "description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "anthropic_config": { "description": "Anthropic Config. Only required if the provider is 'anthropic'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "cohere_config": { "description": "Cohere Config. Only required if the provider is 'cohere'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "custom_provider_config": { "description": "Custom Provider Config. Only required if the provider is 'custom'.", @@ -9460,37 +9460,37 @@ "databricks_model_serving_config": { "description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "google_cloud_vertex_ai_config": { "description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "name": { "description": "The name of the external model.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_config": { "description": "OpenAI Config. Only required if the provider is 'openai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "palm_config": { "description": "PaLM Config. Only required if the provider is 'palm'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "provider": { "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "task": { "description": "The task type of the external model.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -9558,22 +9558,22 @@ "private_key": { "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. 
You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "private_key_plaintext": { "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "project_id": { "description": "This is the Google Cloud project id that the service account is\nassociated with.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "region": { "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false, @@ -9597,57 +9597,57 @@ "microsoft_entra_client_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "microsoft_entra_client_secret": { "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "microsoft_entra_client_secret_plaintext": { "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "microsoft_entra_tenant_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_api_base": { "description": "This is a field to provide a customized base URL for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. 
For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_api_key": { "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_api_key_plaintext": { "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_api_type": { "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD), use azuread.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_api_version": { "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_deployment_name": { "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "openai_organization": { "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9666,12 +9666,12 @@ "palm_api_key": { "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "palm_api_key_plaintext": { "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.228.1" + "since_version": "v0.229.0" } }, "additionalProperties": false @@ -9690,17 +9690,17 @@ "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "since_version": "v0.228.1" + "since_version": "v0.229.0" }, "key": { "description": "Key field for a serving endpoint rate limit. 
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "renewal_period": {
           "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -9756,12 +9756,12 @@
         "served_model_name": {
           "description": "The name of the served model this route configures traffic for.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "traffic_percentage": {
           "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -9783,26 +9783,26 @@
         "entity_name": {
           "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "entity_version": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "environment_vars": {
           "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "external_model": {
           "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "instance_profile_arn": {
           "description": "ARN of the instance profile that the served entity uses to access AWS resources.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "max_provisioned_concurrency": {
           "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.",
@@ -9812,7 +9812,7 @@
         "max_provisioned_throughput": {
           "description": "The maximum tokens per second that the endpoint can scale up to.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "min_provisioned_concurrency": {
           "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.",
@@ -9822,12 +9822,12 @@
         "min_provisioned_throughput": {
           "description": "The minimum tokens per second that the endpoint can scale down to.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "name": {
           "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "provisioned_model_units": {
           "description": "The number of model units provisioned.",
@@ -9837,17 +9837,17 @@
         "scale_to_zero_enabled": {
           "description": "Whether the compute resources for the served entity should scale down to zero.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "workload_size": {
           "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "workload_type": {
           "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         }
       },
       "additionalProperties": false
@@ -9866,12 +9866,12 @@
         "environment_vars": {
           "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "instance_profile_arn": {
           "description": "ARN of the instance profile that the served entity uses to access AWS resources.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "max_provisioned_concurrency": {
           "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.",
@@ -9881,7 +9881,7 @@
         "max_provisioned_throughput": {
           "description": "The maximum tokens per second that the endpoint can scale up to.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "min_provisioned_concurrency": {
           "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.",
@@ -9891,20 +9891,20 @@
         "min_provisioned_throughput": {
           "description": "The minimum tokens per second that the endpoint can scale down to.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "model_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "model_version": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "name": {
           "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "provisioned_model_units": {
           "description": "The number of model units provisioned.",
@@ -9914,17 +9914,17 @@
         "scale_to_zero_enabled": {
           "description": "Whether the compute resources for the served entity should scale down to zero.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "workload_size": {
           "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         },
         "workload_type": {
           "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -9986,7 +9986,7 @@
         "routes": {
           "description": "The list of routes that define traffic to each served entity.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route",
-          "since_version": "v0.228.1"
+          "since_version": "v0.229.0"
         }
       },
       "additionalProperties": false

From 435fc660cbb2a6299bf48965f6db909e2f3b3e9c Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 14 Jan 2026 11:52:02 +0100
Subject: [PATCH 7/8] fix unit test

---
 bundle/internal/schema/annotations_openapi_overrides.yml | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index 61ab17d0c6..17edad1c4e 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -654,10 +654,6 @@ github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission:
       CAN_MONITOR
     - |-
      CAN_VIEW
-github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable:
-  "lifecycle":
-    "description": |-
-      PLACEHOLDER
 github.com/databricks/cli/bundle/config/resources.Volume:
   "_":
     "markdown_description": |-
@@ -813,7 +809,6 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResourceExperiment:
   "permission":
     "description": |-
       PLACEHOLDER
-github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace:
 github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace:
   "name":
     "description": |-

From a933a29cd9c433dbe0536e54ba57c812810530cb Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Wed, 14 Jan 2026 13:53:18 +0100
Subject: [PATCH 8/8] change to camel case

---
 .github/workflows/push.yml                           |    1 +
 bundle/internal/annotation/descriptor.go             |    2 +-
 bundle/internal/schema/annotations.yml               |  700 +++---
 .../schema/annotations_openapi_overrides.yml         | 1308 +++++------
 bundle/schema/jsonschema.json                        | 1996 ++++++++----------
 libs/jsonschema/extension.go                         |    2 +-
 6 files changed, 2005 insertions(+), 2004 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 37f2a8fee7..fb2d52e16c 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -318,6 +318,7 @@ jobs:
           a.addKeyword('deprecationMessage');
           a.addKeyword('doNotSuggest');
           a.addKeyword('markdownDescription');
+          a.addKeyword('sinceVersion');
           a.addKeyword('x-databricks-preview');
         }" >> keywords.js
diff --git a/bundle/internal/annotation/descriptor.go b/bundle/internal/annotation/descriptor.go
index 832cc80646..44dc728410 100644
--- a/bundle/internal/annotation/descriptor.go
+++ b/bundle/internal/annotation/descriptor.go
@@ -10,7 +10,7 @@ type Descriptor struct {
 	DeprecationMessage string `json:"deprecation_message,omitempty"`
 	Preview            string `json:"x-databricks-preview,omitempty"`
 	OutputOnly         *bool  `json:"x-databricks-field-behaviors_output_only,omitempty"`
-	SinceVersion       string `json:"since_version,omitempty"`
+	SinceVersion       string `json:"sinceVersion,omitempty"`
 }
 
 const Placeholder = "PLACEHOLDER"
diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml
index 3eded1977a..e4d029479d 100644
--- a/bundle/internal/schema/annotations.yml
+++ b/bundle/internal/schema/annotations.yml @@ -2,40 +2,40 @@ github.com/databricks/cli/bundle/config.Artifact: "build": "description": |- An optional set of build commands to run locally before deployment. - "since_version": |- + "sinceVersion": |- v0.229.0 "dynamic_version": "description": |- Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts). - "since_version": |- + "sinceVersion": |- v0.245.0 "executable": "description": |- The executable type. Valid values are `bash`, `sh`, and `cmd`. - "since_version": |- + "sinceVersion": |- v0.229.0 "files": "description": |- The relative or absolute path to the built artifact files. - "since_version": |- + "sinceVersion": |- v0.229.0 "path": "description": |- The local path of the directory for the artifact. - "since_version": |- + "sinceVersion": |- v0.229.0 "type": "description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. "markdown_description": |- Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.ArtifactFile: "source": "description": |- Required. The artifact source file. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Bundle: "cluster_id": @@ -43,93 +43,93 @@ github.com/databricks/cli/bundle/config.Bundle: The ID of a cluster to use to run the bundle. "markdown_description": |- The ID of a cluster to use to run the bundle. See [\_](/dev-tools/bundles/settings.md#cluster_id). - "since_version": |- + "sinceVersion": |- v0.229.0 "compute_id": "description": |- Deprecated. The ID of the compute to use to run the bundle. - "since_version": |- + "sinceVersion": |- v0.229.0 "databricks_cli_version": "description": |- The Databricks CLI version to use for the bundle. "markdown_description": |- The Databricks CLI version to use for the bundle. See [\_](/dev-tools/bundles/settings.md#databricks_cli_version). - "since_version": |- + "sinceVersion": |- v0.229.0 "deployment": "description": |- The definition of the bundle deployment "markdown_description": |- The definition of the bundle deployment. For supported attributes see [\_](/dev-tools/bundles/deployment-modes.md). - "since_version": |- + "sinceVersion": |- v0.229.0 "git": "description": |- The Git version control details that are associated with your bundle. "markdown_description": |- The Git version control details that are associated with your bundle. For supported attributes see [\_](/dev-tools/bundles/settings.md#git). - "since_version": |- + "sinceVersion": |- v0.229.0 "name": "description": |- The name of the bundle. - "since_version": |- + "sinceVersion": |- v0.229.0 "uuid": "description": |- Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command). - "since_version": |- + "sinceVersion": |- v0.236.0 github.com/databricks/cli/bundle/config.Deployment: "fail_on_active_runs": "description": |- Whether to fail on active runs. 
If this is set to true a deployment that is running can be interrupted. - "since_version": |- + "sinceVersion": |- v0.229.0 "lock": "description": |- The deployment lock attributes. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Experimental: "pydabs": "description": |- The PyDABs configuration. - "since_version": |- - v0.229.0 "deprecation_message": |- Deprecated: please use python instead + "sinceVersion": |- + v0.229.0 "python": "description": |- Configures loading of Python code defined with 'databricks-bundles' package. - "since_version": |- + "sinceVersion": |- v0.238.0 "python_wheel_wrapper": "description": |- Whether to use a Python wheel wrapper. - "since_version": |- + "sinceVersion": |- v0.229.0 "scripts": "description": |- The commands to run. - "since_version": |- + "sinceVersion": |- v0.229.0 "skip_artifact_cleanup": "description": |- Determines whether to skip cleaning up the .internal folder - "since_version": |- + "sinceVersion": |- v0.254.0 "skip_name_prefix_for_schema": "description": |- Skip adding the prefix that is either set in `presets.name_prefix` or computed when `mode: development` is set, to the names of UC schemas defined in the bundle. - "since_version": |- + "sinceVersion": |- v0.255.0 "use_legacy_run_as": "description": |- Whether to use the legacy run_as behavior. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Git: "branch": @@ -137,67 +137,67 @@ github.com/databricks/cli/bundle/config.Git: The Git branch name. "markdown_description": |- The Git branch name. See [\_](/dev-tools/bundles/settings.md#git). - "since_version": |- + "sinceVersion": |- v0.229.0 "origin_url": "description": |- The origin URL of the repository. "markdown_description": |- The origin URL of the repository. See [\_](/dev-tools/bundles/settings.md#git). - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Lock: "enabled": "description": |- Whether this lock is enabled. - "since_version": |- + "sinceVersion": |- v0.229.0 "force": "description": |- Whether to force this lock if it is enabled. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Presets: "artifacts_dynamic_version": "description": |- Whether to enable dynamic_version on all artifacts. - "since_version": |- + "sinceVersion": |- v0.256.0 "jobs_max_concurrent_runs": "description": |- The maximum concurrent runs for a job. - "since_version": |- + "sinceVersion": |- v0.229.0 "name_prefix": "description": |- The prefix for job runs of the bundle. - "since_version": |- + "sinceVersion": |- v0.229.0 "pipelines_development": "description": |- Whether pipeline deployments should be locked in development mode. - "since_version": |- + "sinceVersion": |- v0.229.0 "source_linked_deployment": "description": |- Whether to link the deployment to the bundle source. - "since_version": |- + "sinceVersion": |- v0.236.0 "tags": "description": |- The tags for the bundle deployment. - "since_version": |- + "sinceVersion": |- v0.229.0 "trigger_pause_status": "description": |- A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED. 
- "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.PyDABs: "enabled": "description": |- Whether or not PyDABs (Private Preview) is enabled - "since_version": |- + "sinceVersion": |- v0.229.0 "import": "description": |- @@ -211,7 +211,7 @@ github.com/databricks/cli/bundle/config.Python: Mutators contains a list of fully qualified function paths to mutator functions. Example: ["my_project.mutators:add_default_cluster"] - "since_version": |- + "sinceVersion": |- v0.238.0 "resources": "description": |- @@ -219,7 +219,7 @@ github.com/databricks/cli/bundle/config.Python: defined in Python code. Example: ["my_project.resources:load_resources"] - "since_version": |- + "sinceVersion": |- v0.238.0 "venv_path": "description": |- @@ -227,126 +227,126 @@ github.com/databricks/cli/bundle/config.Python: If enabled, Python code will execute within this environment. If disabled, it defaults to using the Python interpreter available in the current shell. - "since_version": |- + "sinceVersion": |- v0.238.0 github.com/databricks/cli/bundle/config.Resources: "alerts": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "apps": "description": |- The app resource defines a Databricks app. "markdown_description": |- The app resource defines a [Databricks app](/api/workspace/apps/create). For information about Databricks Apps, see [\_](/dev-tools/databricks-apps/index.md). - "since_version": |- + "sinceVersion": |- v0.239.0 "clusters": "description": |- The cluster definitions for the bundle, where each key is the name of a cluster. "markdown_description": |- The cluster definitions for the bundle, where each key is the name of a cluster. See [\_](/dev-tools/bundles/resources.md#clusters). - "since_version": |- + "sinceVersion": |- v0.229.0 "dashboards": "description": |- The dashboard definitions for the bundle, where each key is the name of the dashboard. "markdown_description": |- The dashboard definitions for the bundle, where each key is the name of the dashboard. See [\_](/dev-tools/bundles/resources.md#dashboards). - "since_version": |- + "sinceVersion": |- v0.232.0 "database_catalogs": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 "database_instances": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 "experiments": "description": |- The experiment definitions for the bundle, where each key is the name of the experiment. "markdown_description": |- The experiment definitions for the bundle, where each key is the name of the experiment. See [\_](/dev-tools/bundles/resources.md#experiments). - "since_version": |- + "sinceVersion": |- v0.229.0 "jobs": "description": |- The job definitions for the bundle, where each key is the name of the job. "markdown_description": |- The job definitions for the bundle, where each key is the name of the job. See [\_](/dev-tools/bundles/resources.md#jobs). - "since_version": |- + "sinceVersion": |- v0.229.0 "model_serving_endpoints": "description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. "markdown_description": |- The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [\_](/dev-tools/bundles/resources.md#model_serving_endpoints). - "since_version": |- + "sinceVersion": |- v0.229.0 "models": "description": |- The model definitions for the bundle, where each key is the name of the model. 
"markdown_description": |- The model definitions for the bundle, where each key is the name of the model. See [\_](/dev-tools/bundles/resources.md#models). - "since_version": |- + "sinceVersion": |- v0.229.0 "pipelines": "description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. "markdown_description": |- The pipeline definitions for the bundle, where each key is the name of the pipeline. See [\_](/dev-tools/bundles/resources.md#pipelines). - "since_version": |- + "sinceVersion": |- v0.229.0 "quality_monitors": "description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. "markdown_description": |- The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [\_](/dev-tools/bundles/resources.md#quality_monitors). - "since_version": |- + "sinceVersion": |- v0.229.0 "registered_models": "description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. "markdown_description": |- The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [\_](/dev-tools/bundles/resources.md#registered_models) - "since_version": |- + "sinceVersion": |- v0.229.0 "schemas": "description": |- The schema definitions for the bundle, where each key is the name of the schema. "markdown_description": |- The schema definitions for the bundle, where each key is the name of the schema. See [\_](/dev-tools/bundles/resources.md#schemas). - "since_version": |- + "sinceVersion": |- v0.229.0 "secret_scopes": "description": |- The secret scope definitions for the bundle, where each key is the name of the secret scope. "markdown_description": |- The secret scope definitions for the bundle, where each key is the name of the secret scope. See [\_](/dev-tools/bundles/resources.md#secret_scopes). - "since_version": |- + "sinceVersion": |- v0.252.0 "sql_warehouses": "description": |- The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. "markdown_description": |- The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. See [\_](/dev-tools/bundles/resources.md#sql_warehouses). - "since_version": |- + "sinceVersion": |- v0.260.0 "synced_database_tables": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.266.0 "volumes": "description": |- The volume definitions for the bundle, where each key is the name of the volume. "markdown_description": |- The volume definitions for the bundle, where each key is the name of the volume. See [\_](/dev-tools/bundles/resources.md#volumes). - "since_version": |- + "sinceVersion": |- v0.236.0 github.com/databricks/cli/bundle/config.Root: "artifacts": @@ -356,8 +356,6 @@ github.com/databricks/cli/bundle/config.Root: Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [\_](/dev-tools/bundles/settings.md#artifacts). Artifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [\_](/dev-tools/bundles/artifact-overrides.md). - "since_version": |- - v0.229.0 "markdown_examples": |- ```yaml artifacts: @@ -366,31 +364,33 @@ github.com/databricks/cli/bundle/config.Root: build: poetry build path: . 
``` + "sinceVersion": |- + v0.229.0 "bundle": "description": |- The bundle attributes when deploying to this target. "markdown_description": |- The bundle attributes when deploying to this target, - "since_version": |- + "sinceVersion": |- v0.229.0 "environments": "description": |- PLACEHOLDER - "since_version": |- - v0.243.0 "deprecation_message": |- Deprecated: please use targets instead + "sinceVersion": |- + v0.243.0 "experimental": "description": |- Defines attributes for experimental features. - "since_version": |- + "sinceVersion": |- v0.229.0 "include": "description": |- Specifies a list of path globs that contain configuration files to include within the bundle. "markdown_description": |- Specifies a list of path globs that contain configuration files to include within the bundle. See [\_](/dev-tools/bundles/settings.md#include). - "since_version": |- + "sinceVersion": |- v0.229.0 "permissions": "description": |- @@ -399,8 +399,6 @@ github.com/databricks/cli/bundle/config.Root: A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity. See [\_](/dev-tools/bundles/settings.md#permissions) and [\_](/dev-tools/bundles/permissions.md). - "since_version": |- - v0.229.0 "markdown_examples": |- ```yaml permissions: @@ -411,17 +409,19 @@ github.com/databricks/cli/bundle/config.Root: - level: CAN_RUN service_principal_name: 123456-abcdef ``` + "sinceVersion": |- + v0.229.0 "presets": "description": |- Defines bundle deployment presets. "markdown_description": |- Defines bundle deployment presets. See [\_](/dev-tools/bundles/deployment-modes.md#presets). - "since_version": |- + "sinceVersion": |- v0.229.0 "python": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.275.0 "resources": "description": |- @@ -435,225 +435,225 @@ github.com/databricks/cli/bundle/config.Root: : : ``` - "since_version": |- + "sinceVersion": |- v0.229.0 "run_as": "description": |- The identity to use when running Databricks Asset Bundles workflows. "markdown_description": |- The identity to use when running Databricks Asset Bundles workflows. See [\_](/dev-tools/bundles/run-as.md). - "since_version": |- + "sinceVersion": |- v0.229.0 "scripts": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.259.0 "sync": "description": |- The files and file paths to include or exclude in the bundle. "markdown_description": |- The files and file paths to include or exclude in the bundle. See [\_](/dev-tools/bundles/settings.md#sync). - "since_version": |- + "sinceVersion": |- v0.229.0 "targets": "description": |- Defines deployment targets for the bundle. "markdown_description": |- Defines deployment targets for the bundle. See [\_](/dev-tools/bundles/settings.md#targets) - "since_version": |- + "sinceVersion": |- v0.229.0 "variables": "description": |- A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable. - "since_version": |- + "sinceVersion": |- v0.229.0 "workspace": "description": |- Defines the Databricks workspace for the bundle. "markdown_description": |- Defines the Databricks workspace for the bundle. See [\_](/dev-tools/bundles/settings.md#workspace). 
- "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Script: "content": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.259.0 github.com/databricks/cli/bundle/config.Sync: "exclude": "description": |- A list of files or folders to exclude from the bundle. - "since_version": |- + "sinceVersion": |- v0.229.0 "include": "description": |- A list of files or folders to include in the bundle. - "since_version": |- + "sinceVersion": |- v0.229.0 "paths": "description": |- The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Target: "artifacts": "description": |- The artifacts to include in the target deployment. - "since_version": |- + "sinceVersion": |- v0.229.0 "bundle": "description": |- The bundle attributes when deploying to this target. - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_id": "description": |- The ID of the cluster to use for this target. - "since_version": |- + "sinceVersion": |- v0.229.0 "compute_id": "description": |- Deprecated. The ID of the compute to use for this target. - "since_version": |- - v0.229.0 "deprecation_message": |- Deprecated: please use cluster_id instead + "sinceVersion": |- + v0.229.0 "default": "description": |- Whether this target is the default target. - "since_version": |- + "sinceVersion": |- v0.229.0 "git": "description": |- The Git version control settings for the target. - "since_version": |- + "sinceVersion": |- v0.229.0 "mode": "description": |- The deployment mode for the target. "markdown_description": |- The deployment mode for the target. Valid values are `development` or `production`. See [\_](/dev-tools/bundles/deployment-modes.md). - "since_version": |- + "sinceVersion": |- v0.229.0 "permissions": "description": |- The permissions for deploying and running the bundle in the target. - "since_version": |- + "sinceVersion": |- v0.229.0 "presets": "description": |- The deployment presets for the target. - "since_version": |- + "sinceVersion": |- v0.229.0 "resources": "description": |- The resource definitions for the target. - "since_version": |- + "sinceVersion": |- v0.229.0 "run_as": "description": |- The identity to use to run the bundle. "markdown_description": |- The identity to use to run the bundle, see [\_](/dev-tools/bundles/run-as.md). - "since_version": |- + "sinceVersion": |- v0.229.0 "sync": "description": |- The local paths to sync to the target workspace when a bundle is run or deployed. - "since_version": |- + "sinceVersion": |- v0.229.0 "variables": "description": |- The custom variable definitions for the target. - "since_version": |- + "sinceVersion": |- v0.229.0 "workspace": "description": |- The Databricks workspace for the target. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config.Workspace: "artifact_path": "description": |- The artifact path to use within the workspace for both deployments and workflow runs - "since_version": |- + "sinceVersion": |- v0.229.0 "auth_type": "description": |- The authentication type. 
- "since_version": |- + "sinceVersion": |- v0.229.0 "azure_client_id": "description": |- The Azure client ID - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_environment": "description": |- The Azure environment - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_login_app_id": "description": |- The Azure login app ID - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_tenant_id": "description": |- The Azure tenant ID - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_use_msi": "description": |- Whether to use MSI for Azure - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_workspace_resource_id": "description": |- The Azure workspace resource ID - "since_version": |- + "sinceVersion": |- v0.229.0 "client_id": "description": |- The client ID for the workspace - "since_version": |- + "sinceVersion": |- v0.229.0 "file_path": "description": |- The file path to use within the workspace for both deployments and workflow runs - "since_version": |- + "sinceVersion": |- v0.229.0 "google_service_account": "description": |- The Google service account name - "since_version": |- + "sinceVersion": |- v0.229.0 "host": "description": |- The Databricks workspace host URL - "since_version": |- + "sinceVersion": |- v0.229.0 "profile": "description": |- The Databricks workspace profile name - "since_version": |- + "sinceVersion": |- v0.229.0 "resource_path": "description": |- The workspace resource path - "since_version": |- + "sinceVersion": |- v0.230.0 "root_path": "description": |- The Databricks workspace root path - "since_version": |- + "sinceVersion": |- v0.229.0 "state_path": "description": |- The workspace state path - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.Alert: "create_time": @@ -662,17 +662,17 @@ github.com/databricks/cli/bundle/config/resources.Alert: "custom_description": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "custom_summary": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "display_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "effective_run_as": "description": |- @@ -689,22 +689,22 @@ github.com/databricks/cli/bundle/config/resources.Alert: "parent_path": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "query_text": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "run_as": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "run_as_user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "update_time": "description": |- @@ -712,466 +712,466 @@ github.com/databricks/cli/bundle/config/resources.Alert: "warehouse_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/cli/bundle/config/resources.AlertPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/cli/bundle/config/resources.App: "description": - "since_version": |- + "sinceVersion": |- v0.239.0 "name": - "since_version": |- + "sinceVersion": |- v0.239.0 "resources": - 
"since_version": |- + "sinceVersion": |- v0.239.0 github.com/databricks/cli/bundle/config/resources.AppPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.Cluster: "apply_policy_default_values": - "since_version": |- + "sinceVersion": |- v0.229.0 "autoscale": - "since_version": |- + "sinceVersion": |- v0.229.0 "autotermination_minutes": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_log_conf": - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "custom_tags": - "since_version": |- + "sinceVersion": |- v0.229.0 "driver_instance_pool_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "driver_node_type_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "enable_elastic_disk": - "since_version": |- + "sinceVersion": |- v0.229.0 "enable_local_disk_encryption": - "since_version": |- + "sinceVersion": |- v0.229.0 "gcp_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "init_scripts": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_pool_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "is_single_node": - "since_version": |- + "sinceVersion": |- v0.237.0 "node_type_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "num_workers": - "since_version": |- + "sinceVersion": |- v0.229.0 "policy_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "remote_disk_throughput": - "since_version": |- + "sinceVersion": |- v0.257.0 "single_user_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_conf": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_env_vars": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_version": - "since_version": |- + "sinceVersion": |- v0.229.0 "ssh_public_keys": - "since_version": |- + "sinceVersion": |- v0.229.0 "total_initial_remote_disk_size": - "since_version": |- + "sinceVersion": |- v0.257.0 "use_ml_runtime": - "since_version": |- + "sinceVersion": |- v0.237.0 github.com/databricks/cli/bundle/config/resources.ClusterPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.Dashboard: "dataset_catalog": "description": |- Sets the default catalog for all datasets in this dashboard. When set, this overrides the catalog specified in individual dataset definitions. - "since_version": |- + "sinceVersion": |- v0.281.0 "dataset_schema": "description": |- Sets the default schema for all datasets in this dashboard. When set, this overrides the schema specified in individual dataset definitions. 
- "since_version": |- + "sinceVersion": |- v0.281.0 github.com/databricks/cli/bundle/config/resources.DashboardPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.DatabaseCatalog: "database_instance_name": - "since_version": |- + "sinceVersion": |- v0.265.0 "database_name": - "since_version": |- + "sinceVersion": |- v0.265.0 "name": - "since_version": |- + "sinceVersion": |- v0.265.0 github.com/databricks/cli/bundle/config/resources.DatabaseInstance: "capacity": - "since_version": |- + "sinceVersion": |- v0.265.0 "custom_tags": - "since_version": |- + "sinceVersion": |- v0.273.0 "effective_capacity": "description": |- PLACEHOLDER "enable_pg_native_login": - "since_version": |- + "sinceVersion": |- v0.267.0 "enable_readable_secondaries": - "since_version": |- + "sinceVersion": |- v0.265.0 "name": - "since_version": |- + "sinceVersion": |- v0.265.0 "node_count": - "since_version": |- + "sinceVersion": |- v0.265.0 "parent_instance_ref": - "since_version": |- + "sinceVersion": |- v0.265.0 "retention_window_in_days": - "since_version": |- + "sinceVersion": |- v0.265.0 "stopped": - "since_version": |- + "sinceVersion": |- v0.265.0 "usage_policy_id": - "since_version": |- + "sinceVersion": |- v0.273.0 github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.265.0 github.com/databricks/cli/bundle/config/resources.Grant: "principal": "description": |- The name of the principal that will be granted privileges - "since_version": |- + "sinceVersion": |- v0.229.0 "privileges": "description": |- The privileges to grant to the specified entity - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.Job: "budget_policy_id": - "since_version": |- + "sinceVersion": |- v0.231.0 "continuous": - "since_version": |- + "sinceVersion": |- v0.229.0 "description": - "since_version": |- + "sinceVersion": |- v0.229.0 "email_notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 "environments": - "since_version": |- + "sinceVersion": |- v0.229.0 "git_source": - "since_version": |- + "sinceVersion": |- v0.229.0 "job_clusters": - "since_version": |- + "sinceVersion": |- v0.229.0 "max_concurrent_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "notification_settings": - "since_version": |- + "sinceVersion": |- v0.229.0 "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "performance_target": - "since_version": |- + "sinceVersion": |- v0.241.0 "queue": - "since_version": |- + "sinceVersion": |- v0.229.0 "schedule": - "since_version": |- + "sinceVersion": |- v0.229.0 "tags": - "since_version": |- + "sinceVersion": |- v0.229.0 "tasks": - "since_version": |- + "sinceVersion": |- v0.229.0 
"timeout_seconds": - "since_version": |- + "sinceVersion": |- v0.229.0 "trigger": - "since_version": |- + "sinceVersion": |- v0.229.0 "usage_policy_id": - "since_version": |- + "sinceVersion": |- v0.265.0 "webhook_notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.JobPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.Lifecycle: "prevent_destroy": "description": |- Lifecycle setting to prevent the resource from being destroyed. - "since_version": |- + "sinceVersion": |- v0.268.0 github.com/databricks/cli/bundle/config/resources.MlflowExperiment: "artifact_location": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "tags": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.MlflowModel: "description": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "tags": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.MlflowModelPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: "ai_gateway": - "since_version": |- + "sinceVersion": |- v0.230.0 "budget_policy_id": - "since_version": |- + "sinceVersion": |- v0.244.0 "config": - "since_version": |- + "sinceVersion": |- v0.229.0 "email_notifications": - "since_version": |- + "sinceVersion": |- v0.264.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "rate_limits": - "since_version": |- + "sinceVersion": |- v0.229.0 "route_optimized": - "since_version": |- + "sinceVersion": |- v0.229.0 "tags": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.Permission: "-": @@ -1182,304 
+1182,304 @@ github.com/databricks/cli/bundle/config/resources.Permission: "group_name": "description": |- The name of the group that has the permission set in level. - "since_version": |- + "sinceVersion": |- v0.229.0 "level": "description": |- The allowed permission for user, group, service principal defined for this permission. - "since_version": |- + "sinceVersion": |- v0.229.0 "service_principal_name": "description": |- The name of the service principal that has the permission set in level. - "since_version": |- + "sinceVersion": |- v0.229.0 "user_name": "description": |- The name of the user that has the permission set in level. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.Pipeline: "allow_duplicate_names": - "since_version": |- + "sinceVersion": |- v0.261.0 "budget_policy_id": - "since_version": |- + "sinceVersion": |- v0.230.0 "catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "channel": - "since_version": |- + "sinceVersion": |- v0.229.0 "clusters": - "since_version": |- + "sinceVersion": |- v0.229.0 "configuration": - "since_version": |- + "sinceVersion": |- v0.229.0 "continuous": - "since_version": |- + "sinceVersion": |- v0.229.0 "development": - "since_version": |- + "sinceVersion": |- v0.229.0 "edition": - "since_version": |- + "sinceVersion": |- v0.229.0 "environment": - "since_version": |- + "sinceVersion": |- v0.257.0 "event_log": - "since_version": |- + "sinceVersion": |- v0.246.0 "filters": - "since_version": |- + "sinceVersion": |- v0.229.0 "gateway_definition": - "since_version": |- + "sinceVersion": |- v0.229.0 "id": - "since_version": |- + "sinceVersion": |- v0.229.0 "ingestion_definition": - "since_version": |- + "sinceVersion": |- v0.229.0 "libraries": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 "photon": - "since_version": |- + "sinceVersion": |- v0.229.0 "restart_window": - "since_version": |- + "sinceVersion": |- v0.234.0 "root_path": - "since_version": |- + "sinceVersion": |- v0.253.0 "schema": - "since_version": |- + "sinceVersion": |- v0.230.0 "serverless": - "since_version": |- + "sinceVersion": |- v0.229.0 "storage": - "since_version": |- + "sinceVersion": |- v0.229.0 "tags": - "since_version": |- + "sinceVersion": |- v0.256.0 "target": - "since_version": |- + "sinceVersion": |- v0.229.0 "usage_policy_id": - "since_version": |- + "sinceVersion": |- v0.276.0 github.com/databricks/cli/bundle/config/resources.PipelinePermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.247.0 github.com/databricks/cli/bundle/config/resources.QualityMonitor: "assets_dir": - "since_version": |- + "sinceVersion": |- v0.229.0 "baseline_table_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "custom_metrics": - "since_version": |- + "sinceVersion": |- v0.229.0 "data_classification_config": - "since_version": |- + "sinceVersion": |- v0.229.0 "latest_monitor_failure_msg": - "since_version": |- + "sinceVersion": |- v0.264.0 "notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 "output_schema_name": - "since_version": |- + 
"sinceVersion": |- v0.229.0 "schedule": - "since_version": |- + "sinceVersion": |- v0.229.0 "skip_builtin_dashboard": - "since_version": |- + "sinceVersion": |- v0.229.0 "slicing_exprs": - "since_version": |- + "sinceVersion": |- v0.229.0 "snapshot": - "since_version": |- + "sinceVersion": |- v0.229.0 "time_series": - "since_version": |- + "sinceVersion": |- v0.229.0 "warehouse_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.RegisteredModel: "catalog_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "comment": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "schema_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "storage_location": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.Schema: "catalog_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "comment": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "storage_root": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/resources.SchemaGrant: "principal": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.267.0 "privileges": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.267.0 github.com/databricks/cli/bundle/config/resources.SecretScope: "backend_type": "description": |- The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - "since_version": |- + "sinceVersion": |- v0.252.0 "keyvault_metadata": "description": |- The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT` - "since_version": |- + "sinceVersion": |- v0.252.0 "lifecycle": "description": |- Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. - "since_version": |- + "sinceVersion": |- v0.268.0 "name": "description": |- Scope name requested by the user. Scope names are unique. - "since_version": |- + "sinceVersion": |- v0.252.0 "permissions": "description": |- The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs. - "since_version": |- + "sinceVersion": |- v0.252.0 github.com/databricks/cli/bundle/config/resources.SecretScopePermission: "group_name": "description": |- The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL. - "since_version": |- + "sinceVersion": |- v0.252.0 "level": "description": |- The allowed permission for user, group, service principal defined for this permission. - "since_version": |- + "sinceVersion": |- v0.252.0 "service_principal_name": "description": |- The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL. - "since_version": |- + "sinceVersion": |- v0.252.0 "user_name": "description": |- The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL. 
- "since_version": |- + "sinceVersion": |- v0.252.0 github.com/databricks/cli/bundle/config/resources.SqlWarehouse: "auto_stop_mins": - "since_version": |- + "sinceVersion": |- v0.260.0 "channel": - "since_version": |- + "sinceVersion": |- v0.260.0 "cluster_size": - "since_version": |- + "sinceVersion": |- v0.260.0 "creator_name": - "since_version": |- + "sinceVersion": |- v0.260.0 "enable_serverless_compute": - "since_version": |- + "sinceVersion": |- v0.260.0 "instance_profile_arn": - "since_version": |- + "sinceVersion": |- v0.260.0 "max_num_clusters": - "since_version": |- + "sinceVersion": |- v0.260.0 "min_num_clusters": - "since_version": |- + "sinceVersion": |- v0.260.0 "name": - "since_version": |- + "sinceVersion": |- v0.260.0 "tags": - "since_version": |- + "sinceVersion": |- v0.260.0 github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission: "group_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "level": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "service_principal_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "data_synchronization_status": @@ -1488,7 +1488,7 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "database_instance_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.266.0 "effective_database_instance_name": "description": |- @@ -1499,129 +1499,129 @@ github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable: "lifecycle": "description": |- Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed. - "since_version": |- + "sinceVersion": |- v0.268.0 "logical_database_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.266.0 "name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.266.0 "spec": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.266.0 "unity_catalog_provisioning_state": "description": |- PLACEHOLDER github.com/databricks/cli/bundle/config/resources.Volume: "catalog_name": - "since_version": |- + "sinceVersion": |- v0.236.0 "comment": - "since_version": |- + "sinceVersion": |- v0.236.0 "name": - "since_version": |- + "sinceVersion": |- v0.236.0 "schema_name": - "since_version": |- + "sinceVersion": |- v0.236.0 "storage_location": - "since_version": |- + "sinceVersion": |- v0.236.0 github.com/databricks/cli/bundle/config/resources.VolumeGrant: "principal": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.264.1 "privileges": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.264.1 github.com/databricks/cli/bundle/config/variable.Lookup: "alert": "description": |- The name of the alert for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster": "description": |- The name of the cluster for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_policy": "description": |- The name of the cluster_policy for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "dashboard": "description": |- The name of the dashboard for which to retrieve an ID. 
- "since_version": |- + "sinceVersion": |- v0.229.0 "instance_pool": "description": |- The name of the instance_pool for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "job": "description": |- The name of the job for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "metastore": "description": |- The name of the metastore for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "notification_destination": "description": |- The name of the notification_destination for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.236.0 "pipeline": "description": |- The name of the pipeline for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "query": "description": |- The name of the query for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "service_principal": "description": |- The name of the service_principal for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "warehouse": "description": |- The name of the warehouse for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/variable.TargetVariable: "default": "description": |- The default value for the variable. - "since_version": |- + "sinceVersion": |- v0.229.0 "description": "description": |- The description of the variable. - "since_version": |- + "sinceVersion": |- v0.229.0 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "markdown_description": "description": |- @@ -1629,7 +1629,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable: "type": "description": |- The type of the variable. - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/cli/bundle/config/variable.Variable: "_": @@ -1640,24 +1640,24 @@ github.com/databricks/cli/bundle/config/variable.Variable: "default": "description": |- The default value for the variable. - "since_version": |- + "sinceVersion": |- v0.229.0 "description": "description": |- The description of the variable - "since_version": |- + "sinceVersion": |- v0.229.0 "lookup": "description": |- The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID. "markdown_description": |- The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID. - "since_version": |- + "sinceVersion": |- v0.229.0 "type": "description": |- The type of the variable. 
- "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs: "service_principal_name": diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 17edad1c4e..b3f2474984 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -710,17 +710,17 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeployment: "deployment_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "mode": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "source_code_path": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "status": "description": |- @@ -732,7 +732,7 @@ github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts: "source_code_path": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus: "message": @@ -745,10 +745,10 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource: "database": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "description": - "since_version": |- + "sinceVersion": |- v0.239.0 "experiment": "description": |- @@ -756,51 +756,51 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource: "genie_space": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "job": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "name": - "since_version": |- + "sinceVersion": |- v0.239.0 "secret": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "serving_endpoint": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "sql_warehouse": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "uc_securable": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.253.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase: "database_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "instance_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceExperiment: "experiment_id": @@ -813,82 +813,82 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace: "name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "space_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob: "id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret: "key": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "scope": "description": |- PLACEHOLDER - "since_version": |- + 
"sinceVersion": |- v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint: "name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse: "id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.239.0 github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable: "permission": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.253.0 "securable_full_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.253.0 "securable_type": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.253.0 github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus: "message": @@ -904,378 +904,378 @@ github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus: "state": {} github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule: "pause_status": - "since_version": |- + "sinceVersion": |- v0.229.0 "quartz_cron_expression": - "since_version": |- + "sinceVersion": |- v0.229.0 "timezone_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig: "enabled": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination: "email_addresses": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. - "since_version": |- + "sinceVersion": |- v0.229.0 "label_col": - "since_version": |- + "sinceVersion": |- v0.229.0 "model_id_col": - "since_version": |- + "sinceVersion": |- v0.229.0 "prediction_col": - "since_version": |- + "sinceVersion": |- v0.229.0 "prediction_proba_col": - "since_version": |- + "sinceVersion": |- v0.229.0 "problem_type": - "since_version": |- + "sinceVersion": |- v0.229.0 "timestamp_col": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric: "definition": - "since_version": |- + "sinceVersion": |- v0.229.0 "input_columns": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "output_data_type": - "since_version": |- + "sinceVersion": |- v0.229.0 "type": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications: "on_failure": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_new_classification_tag_detected": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries: "granularities": "description": |- Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year. 
- "since_version": |- + "sinceVersion": |- v0.229.0 "timestamp_col": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/catalog.RegisteredModelAlias: "alias_name": - "since_version": |- + "sinceVersion": |- v0.273.0 "catalog_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "model_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "schema_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.273.0 "version_num": - "since_version": |- + "sinceVersion": |- v0.273.0 github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AutoScale: "max_workers": - "since_version": |- + "sinceVersion": |- v0.229.0 "min_workers": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes: "availability": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "ebs_volume_count": - "since_version": |- + "sinceVersion": |- v0.229.0 "ebs_volume_iops": - "since_version": |- + "sinceVersion": |- v0.229.0 "ebs_volume_size": - "since_version": |- + "sinceVersion": |- v0.229.0 "ebs_volume_throughput": - "since_version": |- + "sinceVersion": |- v0.229.0 "ebs_volume_type": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "first_on_demand": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_profile_arn": - "since_version": |- + "sinceVersion": |- v0.229.0 "spot_bid_price_percent": - "since_version": |- + "sinceVersion": |- v0.229.0 "zone_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes: "availability": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "first_on_demand": - "since_version": |- + "sinceVersion": |- v0.229.0 "log_analytics_info": - "since_version": |- + "sinceVersion": |- v0.229.0 "spot_bid_max_price": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes: "jobs": - "since_version": |- + "sinceVersion": |- v0.229.0 "notebooks": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf: "dbfs": - "since_version": |- + "sinceVersion": |- v0.229.0 "s3": - "since_version": |- + "sinceVersion": |- v0.229.0 "volumes": - "since_version": |- + "sinceVersion": |- v0.242.0 github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: "apply_policy_default_values": - "since_version": |- + "sinceVersion": |- v0.229.0 "autoscale": - "since_version": |- + "sinceVersion": |- v0.229.0 "autotermination_minutes": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "azure_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_log_conf": - "since_version": |- + "sinceVersion": |- v0.229.0 "cluster_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "custom_tags": - "since_version": |- + "sinceVersion": |- v0.229.0 "data_security_mode": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "docker_image": "description": |- PLACEHOLDER - "since_version": |- + 
"sinceVersion": |- v0.229.0 "driver_instance_pool_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "driver_node_type_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "enable_elastic_disk": - "since_version": |- + "sinceVersion": |- v0.229.0 "enable_local_disk_encryption": - "since_version": |- + "sinceVersion": |- v0.229.0 "gcp_attributes": - "since_version": |- + "sinceVersion": |- v0.229.0 "init_scripts": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_pool_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "is_single_node": - "since_version": |- + "sinceVersion": |- v0.237.0 "kind": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.237.0 "node_type_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "num_workers": - "since_version": |- + "sinceVersion": |- v0.229.0 "policy_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "remote_disk_throughput": - "since_version": |- + "sinceVersion": |- v0.257.0 "runtime_engine": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "single_user_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_conf": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_env_vars": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_version": - "since_version": |- + "sinceVersion": |- v0.229.0 "ssh_public_keys": - "since_version": |- + "sinceVersion": |- v0.229.0 "total_initial_remote_disk_size": - "since_version": |- + "sinceVersion": |- v0.257.0 "use_ml_runtime": - "since_version": |- + "sinceVersion": |- v0.237.0 "workload_type": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth: "password": - "since_version": |- + "sinceVersion": |- v0.229.0 "username": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.DockerImage: "basic_auth": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "url": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.Environment: "client": - "since_version": |- + "sinceVersion": |- v0.229.0 "dependencies": "description": |- List of pip dependencies, as supported by the version of pip in this environment. 
- "since_version": |- + "sinceVersion": |- v0.229.0 "environment_version": - "since_version": |- + "sinceVersion": |- v0.252.0 "java_dependencies": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.271.0 github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes: "availability": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "boot_disk_size": - "since_version": |- + "sinceVersion": |- v0.229.0 "first_on_demand": - "since_version": |- + "sinceVersion": |- v0.265.0 "google_service_account": - "since_version": |- + "sinceVersion": |- v0.229.0 "local_ssd_count": - "since_version": |- + "sinceVersion": |- v0.229.0 "use_preemptible_executors": - "since_version": |- + "sinceVersion": |- v0.229.0 "zone_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo: "abfss": "description": |- Contains the Azure Data Lake Storage destination path - "since_version": |- + "sinceVersion": |- v0.229.0 "dbfs": - "since_version": |- + "sinceVersion": |- v0.229.0 "file": - "since_version": |- + "sinceVersion": |- v0.229.0 "gcs": - "since_version": |- + "sinceVersion": |- v0.229.0 "s3": - "since_version": |- + "sinceVersion": |- v0.229.0 "volumes": - "since_version": |- + "sinceVersion": |- v0.229.0 "workspace": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.Kind: "_": @@ -1284,125 +1284,125 @@ github.com/databricks/databricks-sdk-go/service/compute.Kind: CLASSIC_PREVIEW github.com/databricks/databricks-sdk-go/service/compute.Library: "cran": - "since_version": |- + "sinceVersion": |- v0.229.0 "egg": - "since_version": |- + "sinceVersion": |- v0.229.0 "jar": - "since_version": |- + "sinceVersion": |- v0.229.0 "maven": - "since_version": |- + "sinceVersion": |- v0.229.0 "pypi": - "since_version": |- + "sinceVersion": |- v0.229.0 "requirements": - "since_version": |- + "sinceVersion": |- v0.229.0 "whl": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo: "log_analytics_primary_key": "description": |- The primary key for the Azure Log Analytics agent configuration - "since_version": |- + "sinceVersion": |- v0.229.0 "log_analytics_workspace_id": "description": |- The workspace ID for the Azure Log Analytics agent configuration - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary: "coordinates": - "since_version": |- + "sinceVersion": |- v0.229.0 "exclusions": - "since_version": |- + "sinceVersion": |- v0.229.0 "repo": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary: "package": - "since_version": |- + "sinceVersion": |- v0.229.0 "repo": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary: "package": - "since_version": |- + "sinceVersion": |- v0.229.0 "repo": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo: "canned_acl": - "since_version": |- + "sinceVersion": |- v0.229.0 "destination": - 
"since_version": |- + "sinceVersion": |- v0.229.0 "enable_encryption": - "since_version": |- + "sinceVersion": |- v0.229.0 "encryption_type": - "since_version": |- + "sinceVersion": |- v0.229.0 "endpoint": - "since_version": |- + "sinceVersion": |- v0.229.0 "kms_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "region": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.WorkloadType: "clients": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo: "destination": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/database.CustomTag: "key": - "since_version": |- + "sinceVersion": |- v0.273.0 "value": - "since_version": |- + "sinceVersion": |- v0.273.0 github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef: "branch_time": - "since_version": |- + "sinceVersion": |- v0.265.0 "lsn": - "since_version": |- + "sinceVersion": |- v0.265.0 "name": - "since_version": |- + "sinceVersion": |- v0.265.0 github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec: "budget_policy_id": - "since_version": |- + "sinceVersion": |- v0.279.0 "storage_catalog": - "since_version": |- + "sinceVersion": |- v0.266.0 "storage_schema": - "since_version": |- + "sinceVersion": |- v0.266.0 github.com/databricks/databricks-sdk-go/service/database.SyncedTablePosition: "delta_table_sync_info": @@ -1410,1666 +1410,1666 @@ github.com/databricks/databricks-sdk-go/service/database.SyncedTablePosition: PLACEHOLDER github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec: "create_database_objects_if_missing": - "since_version": |- + "sinceVersion": |- v0.266.0 "existing_pipeline_id": - "since_version": |- + "sinceVersion": |- v0.266.0 "new_pipeline_spec": - "since_version": |- + "sinceVersion": |- v0.266.0 "primary_key_columns": - "since_version": |- + "sinceVersion": |- v0.266.0 "scheduling_policy": - "since_version": |- + "sinceVersion": |- v0.266.0 "source_table_full_name": - "since_version": |- + "sinceVersion": |- v0.266.0 "timeseries_key": - "since_version": |- + "sinceVersion": |- v0.266.0 github.com/databricks/databricks-sdk-go/service/database.SyncedTableStatus: "continuous_update_status": - "since_version": |- + "sinceVersion": |- v0.266.0 "failed_status": - "since_version": |- + "sinceVersion": |- v0.266.0 "provisioning_status": - "since_version": |- + "sinceVersion": |- v0.266.0 "triggered_update_status": - "since_version": |- + "sinceVersion": |- v0.266.0 github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask: "clean_room_name": - "since_version": |- + "sinceVersion": |- v0.237.0 "etag": - "since_version": |- + "sinceVersion": |- v0.237.0 "notebook_base_parameters": - "since_version": |- + "sinceVersion": |- v0.237.0 "notebook_name": - "since_version": |- + "sinceVersion": |- v0.237.0 github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig: "gpu_node_pool_id": - "since_version": |- + "sinceVersion": |- v0.243.0 "gpu_type": - "since_version": |- + "sinceVersion": |- v0.243.0 "num_gpus": - "since_version": |- + "sinceVersion": |- v0.243.0 github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask: "left": - "since_version": |- + "sinceVersion": |- v0.229.0 "op": - "since_version": |- + "sinceVersion": |- 
       v0.229.0
   "right":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.Continuous:
   "pause_status":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "task_retry_mode":
-    "since_version": |-
+    "sinceVersion": |-
       v0.267.0
 github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule:
   "pause_status":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "quartz_cron_expression":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "timezone_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask:
   "dashboard_id":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.248.0
   "subscription":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.248.0
   "warehouse_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.248.0
 github.com/databricks/databricks-sdk-go/service/jobs.DbtCloudTask:
   "connection_resource_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.256.0
   "dbt_cloud_job_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.256.0
 github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask:
   "connection_resource_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.257.0
   "dbt_platform_job_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.257.0
 github.com/databricks/databricks-sdk-go/service/jobs.DbtTask:
   "catalog":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "commands":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "profiles_directory":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "project_directory":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "schema":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "source":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "warehouse_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration:
   "min_time_between_triggers_seconds":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "url":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "wait_after_last_change_seconds":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask:
   "concurrency":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "inputs":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "task":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask:
   "command":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "compute":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "dl_runtime_image":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "mlflow_experiment_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "source":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "training_script_path":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "yaml_parameters":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
   "yaml_parameters_file_path":
-    "since_version": |-
+    "sinceVersion": |-
       v0.243.0
 github.com/databricks/databricks-sdk-go/service/jobs.GitSnapshot:
   "used_commit":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/jobs.GitSource:
   "git_branch":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "git_commit":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "git_provider":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "git_snapshot":
     "description": |-
       PLACEHOLDER
   "git_tag":
- "since_version": |- + "sinceVersion": |- v0.229.0 "git_url": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobCluster: "job_cluster_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "new_cluster": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobDeployment: "kind": - "since_version": |- + "sinceVersion": |- v0.229.0 "metadata_file_path": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications: "no_alert_for_skipped_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_duration_warning_threshold_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_failure": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_start": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_streaming_backlog_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_success": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment: "environment_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "spec": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings: "no_alert_for_canceled_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 "no_alert_for_skipped_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition: "default": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobSource: "dirty_state": - "since_version": |- + "sinceVersion": |- v0.229.0 "import_from_git_branch": - "since_version": |- + "sinceVersion": |- v0.229.0 "job_config_path": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule: "metric": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "op": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "value": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules: "rules": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration: "aliases": - "since_version": |- + "sinceVersion": |- v0.279.0 "condition": - "since_version": |- + "sinceVersion": |- v0.279.0 "min_time_between_triggers_seconds": - "since_version": |- + "sinceVersion": |- v0.279.0 "securable_name": - "since_version": |- + "sinceVersion": |- v0.279.0 "wait_after_last_change_seconds": - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask: "base_parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "notebook_path": - "since_version": |- + "sinceVersion": |- v0.229.0 "source": - "since_version": |- + "sinceVersion": |- v0.229.0 "warehouse_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration: "interval": - "since_version": |- + "sinceVersion": |- v0.229.0 "unit": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams: 
"full_refresh": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask: "full_refresh": - "since_version": |- + "sinceVersion": |- v0.229.0 "pipeline_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel: "authentication_method": - "since_version": |- + "sinceVersion": |- v0.248.0 "model_name": - "since_version": |- + "sinceVersion": |- v0.248.0 "overwrite_existing": - "since_version": |- + "sinceVersion": |- v0.248.0 "storage_mode": - "since_version": |- + "sinceVersion": |- v0.248.0 "workspace_name": - "since_version": |- + "sinceVersion": |- v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable: "catalog": - "since_version": |- + "sinceVersion": |- v0.248.0 "name": - "since_version": |- + "sinceVersion": |- v0.248.0 "schema": - "since_version": |- + "sinceVersion": |- v0.248.0 "storage_mode": - "since_version": |- + "sinceVersion": |- v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask: "connection_resource_name": - "since_version": |- + "sinceVersion": |- v0.248.0 "power_bi_model": - "since_version": |- + "sinceVersion": |- v0.248.0 "refresh_after_update": - "since_version": |- + "sinceVersion": |- v0.248.0 "tables": - "since_version": |- + "sinceVersion": |- v0.248.0 "warehouse_id": - "since_version": |- + "sinceVersion": |- v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask: "entry_point": - "since_version": |- + "sinceVersion": |- v0.229.0 "named_parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "package_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings: "enabled": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask: "dbt_commands": - "since_version": |- + "sinceVersion": |- v0.229.0 "jar_params": - "since_version": |- + "sinceVersion": |- v0.229.0 "job_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "job_parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "notebook_params": - "since_version": |- + "sinceVersion": |- v0.229.0 "pipeline_params": - "since_version": |- + "sinceVersion": |- v0.229.0 "python_named_params": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "python_params": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_submit_params": - "since_version": |- + "sinceVersion": |- v0.229.0 "sql_params": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask: "jar_uri": - "since_version": |- + "sinceVersion": |- v0.229.0 "main_class_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "run_as_repl": - "since_version": |- + "sinceVersion": |- v0.240.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask: "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "python_file": - "since_version": |- + "sinceVersion": |- v0.229.0 "source": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask: "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTask: "alert": - "since_version": |- + "sinceVersion": |- v0.229.0 
"dashboard": - "since_version": |- + "sinceVersion": |- v0.229.0 "file": - "since_version": |- + "sinceVersion": |- v0.229.0 "parameters": - "since_version": |- + "sinceVersion": |- v0.229.0 "query": - "since_version": |- + "sinceVersion": |- v0.229.0 "warehouse_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert: "alert_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "pause_subscriptions": - "since_version": |- + "sinceVersion": |- v0.229.0 "subscriptions": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard: "custom_subject": - "since_version": |- + "sinceVersion": |- v0.229.0 "dashboard_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "pause_subscriptions": - "since_version": |- + "sinceVersion": |- v0.229.0 "subscriptions": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile: "path": - "since_version": |- + "sinceVersion": |- v0.229.0 "source": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery: "query_id": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription: "destination_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "user_name": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Subscription: "custom_subject": - "since_version": |- + "sinceVersion": |- v0.248.0 "paused": - "since_version": |- + "sinceVersion": |- v0.248.0 "subscribers": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber: "destination_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.248.0 "user_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.248.0 github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration: "condition": - "since_version": |- + "sinceVersion": |- v0.229.0 "min_time_between_triggers_seconds": - "since_version": |- + "sinceVersion": |- v0.229.0 "table_names": - "since_version": |- + "sinceVersion": |- v0.229.0 "wait_after_last_change_seconds": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Task: "clean_rooms_notebook_task": - "since_version": |- + "sinceVersion": |- v0.237.0 "condition_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "dashboard_task": - "since_version": |- + "sinceVersion": |- v0.248.0 "dbt_cloud_task": - "since_version": |- + "sinceVersion": |- v0.256.0 "dbt_platform_task": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.257.0 "dbt_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "depends_on": - "since_version": |- + "sinceVersion": |- v0.229.0 "description": - "since_version": |- + "sinceVersion": |- v0.229.0 "disable_auto_optimization": - "since_version": |- + "sinceVersion": |- v0.229.0 "disabled": - "since_version": |- + "sinceVersion": |- v0.271.0 "email_notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 "environment_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "existing_cluster_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "for_each_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "gen_ai_compute_task": 
"description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.243.0 "health": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "job_cluster_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "libraries": - "since_version": |- + "sinceVersion": |- v0.229.0 "max_retries": - "since_version": |- + "sinceVersion": |- v0.229.0 "min_retry_interval_millis": - "since_version": |- + "sinceVersion": |- v0.229.0 "new_cluster": - "since_version": |- + "sinceVersion": |- v0.229.0 "notebook_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "notification_settings": - "since_version": |- + "sinceVersion": |- v0.229.0 "pipeline_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "power_bi_task": - "since_version": |- + "sinceVersion": |- v0.248.0 "python_wheel_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "retry_on_timeout": - "since_version": |- + "sinceVersion": |- v0.229.0 "run_if": - "since_version": |- + "sinceVersion": |- v0.229.0 "run_job_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_jar_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_python_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "spark_submit_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "sql_task": - "since_version": |- + "sinceVersion": |- v0.229.0 "task_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "timeout_seconds": - "since_version": |- + "sinceVersion": |- v0.229.0 "webhook_notifications": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency: "outcome": - "since_version": |- + "sinceVersion": |- v0.229.0 "task_key": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications: "no_alert_for_skipped_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_duration_warning_threshold_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_failure": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_start": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_streaming_backlog_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_success": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings: "alert_on_last_attempt": - "since_version": |- + "sinceVersion": |- v0.229.0 "no_alert_for_canceled_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 "no_alert_for_skipped_runs": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings: "file_arrival": - "since_version": |- + "sinceVersion": |- v0.229.0 "model": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "pause_status": - "since_version": |- + "sinceVersion": |- v0.229.0 "periodic": - "since_version": |- + "sinceVersion": |- v0.229.0 "table_update": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.Webhook: "id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications: "on_duration_warning_threshold_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_failure": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_start": - "since_version": |- + "sinceVersion": |- v0.229.0 
"on_streaming_backlog_exceeded": - "since_version": |- + "sinceVersion": |- v0.229.0 "on_success": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag: "key": - "since_version": |- + "sinceVersion": |- v0.229.0 "value": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/ml.ModelTag: "key": - "since_version": |- + "sinceVersion": |- v0.229.0 "value": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters: "source_catalog": - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: "quartz_cron_schedule": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "timezone_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec: "catalog": - "since_version": |- + "sinceVersion": |- v0.246.0 "name": - "since_version": |- + "sinceVersion": |- v0.246.0 "schema": - "since_version": |- + "sinceVersion": |- v0.246.0 github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary: "path": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.Filters: "exclude": - "since_version": |- + "sinceVersion": |- v0.229.0 "include": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig: "report": - "since_version": |- + "sinceVersion": |- v0.231.0 "schema": - "since_version": |- + "sinceVersion": |- v0.229.0 "table": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition: "connection_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "connection_name": - "since_version": |- + "sinceVersion": |- v0.234.0 "connection_parameters": - "since_version": |- + "sinceVersion": |- v0.279.0 "gateway_storage_catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "gateway_storage_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "gateway_storage_schema": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition: "connection_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "ingest_from_uc_foreign_catalog": - "since_version": |- + "sinceVersion": |- v0.279.0 "ingestion_gateway_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "netsuite_jar_path": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.271.0 "objects": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_configurations": - "since_version": |- + "sinceVersion": |- v0.267.0 "table_configuration": - "since_version": |- + "sinceVersion": |- v0.229.0 ? 
 ? github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig
 : "cursor_columns":
-    "since_version": |-
+    "sinceVersion": |-
       v0.264.0
   "deletion_condition":
-    "since_version": |-
+    "sinceVersion": |-
       v0.264.0
   "hard_deletion_sync_min_interval_in_seconds":
-    "since_version": |-
+    "sinceVersion": |-
       v0.264.0
 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters:
   "incremental":
-    "since_version": |-
+    "sinceVersion": |-
       v0.271.0
   "parameters":
-    "since_version": |-
+    "sinceVersion": |-
       v0.271.0
   "report_parameters":
-    "since_version": |-
+    "sinceVersion": |-
       v0.271.0
 github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue:
   "key":
-    "since_version": |-
+    "sinceVersion": |-
       v0.271.0
   "value":
-    "since_version": |-
+    "sinceVersion": |-
       v0.271.0
 github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary:
   "path":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/pipelines.Notifications:
   "alerts":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "email_recipients":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern:
   "include":
-    "since_version": |-
+    "sinceVersion": |-
       v0.252.0
 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster:
   "apply_policy_default_values":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "autoscale":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "aws_attributes":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "azure_attributes":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "cluster_log_conf":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "custom_tags":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "driver_instance_pool_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "driver_node_type_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "enable_local_disk_encryption":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "gcp_attributes":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "init_scripts":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "instance_pool_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "label":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "node_type_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "num_workers":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "policy_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "spark_conf":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "spark_env_vars":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "ssh_public_keys":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale:
   "max_workers":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "min_workers":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "mode":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineDeployment:
   "kind":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "metadata_file_path":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary:
   "file":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "glob":
-    "since_version": |-
+    "sinceVersion": |-
       v0.252.0
   "jar":
-    "since_version": |-
"sinceVersion": |- v0.229.0 "maven": - "since_version": |- + "sinceVersion": |- v0.229.0 "notebook": - "since_version": |- + "sinceVersion": |- v0.229.0 "whl": - "since_version": |- - v0.229.0 "deprecation_message": |- This field is deprecated + "sinceVersion": |- + v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "cron": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "manual": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment: "dependencies": - "since_version": |- + "sinceVersion": |- v0.257.0 github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig: "slot_config": - "since_version": |- + "sinceVersion": |- v0.267.0 github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig: "publication_name": - "since_version": |- + "sinceVersion": |- v0.267.0 "slot_name": - "since_version": |- + "sinceVersion": |- v0.267.0 github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec: "destination_catalog": - "since_version": |- + "sinceVersion": |- v0.231.0 "destination_schema": - "since_version": |- + "sinceVersion": |- v0.231.0 "destination_table": - "since_version": |- + "sinceVersion": |- v0.231.0 "source_url": - "since_version": |- + "sinceVersion": |- v0.231.0 "table_configuration": - "since_version": |- + "sinceVersion": |- v0.231.0 github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow: "days_of_week": - "since_version": |- + "sinceVersion": |- v0.234.0 "start_hour": - "since_version": |- + "sinceVersion": |- v0.234.0 "time_zone_id": - "since_version": |- + "sinceVersion": |- v0.234.0 github.com/databricks/databricks-sdk-go/service/pipelines.RunAs: "service_principal_name": - "since_version": |- + "sinceVersion": |- v0.241.0 "user_name": - "since_version": |- + "sinceVersion": |- v0.241.0 github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec: "destination_catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "destination_schema": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_schema": - "since_version": |- + "sinceVersion": |- v0.229.0 "table_configuration": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig: "postgres": - "since_version": |- + "sinceVersion": |- v0.267.0 "source_catalog": - "since_version": |- + "sinceVersion": |- v0.267.0 github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig: "catalog": - "since_version": |- + "sinceVersion": |- v0.267.0 github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec: "destination_catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "destination_schema": - "since_version": |- + "sinceVersion": |- v0.229.0 "destination_table": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_catalog": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_schema": - "since_version": |- + "sinceVersion": |- v0.229.0 "source_table": - "since_version": |- + "sinceVersion": |- v0.229.0 "table_configuration": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig: "exclude_columns": - "since_version": |- + "sinceVersion": |- v0.251.0 "include_columns": - "since_version": |- + "sinceVersion": |- v0.251.0 "primary_keys": - 
"since_version": |- + "sinceVersion": |- v0.229.0 "query_based_connector_config": - "since_version": |- + "sinceVersion": |- v0.264.0 "salesforce_include_formula_fields": - "since_version": |- + "sinceVersion": |- v0.229.0 "scd_type": - "since_version": |- + "sinceVersion": |- v0.229.0 "sequence_by": - "since_version": |- + "sinceVersion": |- v0.231.0 "workday_report_parameters": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.271.0 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: "ai21labs_api_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "ai21labs_api_key_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig: "fallback_config": - "since_version": |- + "sinceVersion": |- v0.246.0 "guardrails": - "since_version": |- + "sinceVersion": |- v0.230.0 "inference_table_config": - "since_version": |- + "sinceVersion": |- v0.230.0 "rate_limits": - "since_version": |- + "sinceVersion": |- v0.230.0 "usage_tracking_config": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters: "invalid_keywords": - "since_version": |- + "sinceVersion": |- v0.230.0 "pii": - "since_version": |- + "sinceVersion": |- v0.230.0 "safety": - "since_version": |- + "sinceVersion": |- v0.230.0 "valid_topics": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior: "behavior": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails: "input": - "since_version": |- + "sinceVersion": |- v0.230.0 "output": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig: "catalog_name": - "since_version": |- + "sinceVersion": |- v0.230.0 "enabled": - "since_version": |- + "sinceVersion": |- v0.230.0 "schema_name": - "since_version": |- + "sinceVersion": |- v0.230.0 "table_name_prefix": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit: "calls": - "since_version": |- + "sinceVersion": |- v0.230.0 "key": - "since_version": |- + "sinceVersion": |- v0.230.0 "principal": - "since_version": |- + "sinceVersion": |- v0.260.0 "renewal_period": - "since_version": |- + "sinceVersion": |- v0.230.0 "tokens": - "since_version": |- + "sinceVersion": |- v0.265.0 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig: "enabled": - "since_version": |- + "sinceVersion": |- v0.230.0 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig: "aws_access_key_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_access_key_id_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_region": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_secret_access_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "aws_secret_access_key_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 "bedrock_provider": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_profile_arn": - "since_version": |- + "sinceVersion": |- v0.243.0 github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig: "anthropic_api_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "anthropic_api_key_plaintext": - "since_version": |- + "sinceVersion": |- 
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth:
   "key":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "value":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "value_plaintext":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
 github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
   "catalog_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "enabled":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "schema_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "table_name_prefix":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth:
   "token":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "token_plaintext":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
 github.com/databricks/databricks-sdk-go/service/serving.CohereConfig:
   "cohere_api_base":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "cohere_api_key":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "cohere_api_key_plaintext":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig:
   "api_key_auth":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "bearer_token_auth":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "custom_provider_url":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
 github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig:
   "databricks_api_token":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "databricks_api_token_plaintext":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "databricks_workspace_url":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications:
   "on_update_failure":
-    "since_version": |-
+    "sinceVersion": |-
       v0.264.0
   "on_update_success":
-    "since_version": |-
+    "sinceVersion": |-
       v0.264.0
 github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput:
   "auto_capture_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "served_entities":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "served_models":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "traffic_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.EndpointTag:
   "key":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "value":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.ExternalModel:
   "ai21labs_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "amazon_bedrock_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "anthropic_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "cohere_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "custom_provider_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.246.0
   "databricks_model_serving_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "google_cloud_vertex_ai_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "openai_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "palm_config":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "provider":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
   "task":
-    "since_version": |-
+    "sinceVersion": |-
       v0.229.0
 github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig:
   "enabled":
-    "since_version": |-
"sinceVersion": |- v0.246.0 github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig: "private_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "private_key_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 "project_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "region": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig: "microsoft_entra_client_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "microsoft_entra_client_secret": - "since_version": |- + "sinceVersion": |- v0.229.0 "microsoft_entra_client_secret_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 "microsoft_entra_tenant_id": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_api_base": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_api_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_api_key_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_api_type": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_api_version": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_deployment_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "openai_organization": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig: "palm_api_key": - "since_version": |- + "sinceVersion": |- v0.229.0 "palm_api_key_plaintext": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.RateLimit: "calls": - "since_version": |- + "sinceVersion": |- v0.229.0 "key": - "since_version": |- + "sinceVersion": |- v0.229.0 "renewal_period": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.Route: "served_entity_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "served_model_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "traffic_percentage": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: "entity_name": - "since_version": |- + "sinceVersion": |- v0.229.0 "entity_version": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "environment_vars": - "since_version": |- + "sinceVersion": |- v0.229.0 "external_model": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_profile_arn": - "since_version": |- + "sinceVersion": |- v0.229.0 "max_provisioned_concurrency": - "since_version": |- + "sinceVersion": |- v0.256.0 "max_provisioned_throughput": - "since_version": |- + "sinceVersion": |- v0.229.0 "min_provisioned_concurrency": - "since_version": |- + "sinceVersion": |- v0.256.0 "min_provisioned_throughput": - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "provisioned_model_units": - "since_version": |- + "sinceVersion": |- v0.252.0 "scale_to_zero_enabled": - "since_version": |- + "sinceVersion": |- v0.229.0 "workload_size": - "since_version": |- + "sinceVersion": |- v0.229.0 "workload_type": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: "environment_vars": - "since_version": |- + "sinceVersion": |- v0.229.0 "instance_profile_arn": - "since_version": |- + "sinceVersion": |- v0.229.0 "max_provisioned_concurrency": - "since_version": |- + "sinceVersion": |- v0.256.0 
"max_provisioned_throughput": - "since_version": |- + "sinceVersion": |- v0.229.0 "min_provisioned_concurrency": - "since_version": |- + "sinceVersion": |- v0.256.0 "min_provisioned_throughput": - "since_version": |- + "sinceVersion": |- v0.229.0 "model_name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "model_version": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.229.0 "name": - "since_version": |- + "sinceVersion": |- v0.229.0 "provisioned_model_units": - "since_version": |- + "sinceVersion": |- v0.252.0 "scale_to_zero_enabled": - "since_version": |- + "sinceVersion": |- v0.229.0 "workload_size": - "since_version": |- + "sinceVersion": |- v0.229.0 "workload_type": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig: "routes": - "since_version": |- + "sinceVersion": |- v0.229.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation: "comparison_operator": - "since_version": |- + "sinceVersion": |- v0.279.0 "empty_result_state": - "since_version": |- + "sinceVersion": |- v0.279.0 "notification": - "since_version": |- + "sinceVersion": |- v0.279.0 "source": - "since_version": |- + "sinceVersion": |- v0.279.0 "threshold": - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification: "notify_on_ok": - "since_version": |- + "sinceVersion": |- v0.279.0 "retrigger_seconds": - "since_version": |- + "sinceVersion": |- v0.279.0 "subscriptions": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand: "column": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "value": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn: "aggregation": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "display": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue: "bool_value": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "double_value": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "string_value": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs: "service_principal_name": - "since_version": |- + "sinceVersion": |- v0.279.0 "user_name": - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription: "destination_id": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 "user_email": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.279.0 github.com/databricks/databricks-sdk-go/service/sql.Channel: "dbsql_version": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 "name": "description": |- PLACEHOLDER - "since_version": |- + "sinceVersion": |- v0.260.0 github.com/databricks/databricks-sdk-go/service/sql.CronSchedule: "pause_status": - "since_version": |- + "sinceVersion": |- v0.279.0 "quartz_cron_schedule": - "since_version": |- + "sinceVersion": 
       v0.279.0
   "timezone_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.279.0
 github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair:
   "key":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.260.0
   "value":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.260.0
 github.com/databricks/databricks-sdk-go/service/sql.EndpointTags:
   "custom_tags":
     "description": |-
       PLACEHOLDER
-    "since_version": |-
+    "sinceVersion": |-
       v0.260.0
 github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata:
   "dns_name":
-    "since_version": |-
+    "sinceVersion": |-
       v0.252.0
   "resource_id":
-    "since_version": |-
+    "sinceVersion": |-
       v0.252.0
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index 3ae08ea4f2..50eb9a2a1b 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -66,15 +66,15 @@
       "properties": {
         "custom_description": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "custom_summary": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
        },
         "display_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "evaluation": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Evaluation"
@@ -87,23 +87,23 @@
         },
         "parent_path": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AlertPermission"
         },
         "query_text": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "run_as": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2RunAs",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "run_as_user_name": {
           "$ref": "#/$defs/string",
           "deprecationMessage": "This field is deprecated",
-          "since_version": "v0.279.0",
+          "sinceVersion": "v0.279.0",
           "deprecated": true
         },
         "schedule": {
@@ -111,7 +111,7 @@
         },
         "warehouse_id": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         }
       },
       "additionalProperties": false,
@@ -136,19 +136,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AlertPermissionLevel",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.279.0"
+          "sinceVersion": "v0.279.0"
         }
       },
       "additionalProperties": false,
@@ -193,7 +193,7 @@
         "description": {
           "description": "The description of the app.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.239.0"
+          "sinceVersion": "v0.239.0"
         },
         "git_repository": {
           "description": "Git repository configuration for app deployments. When specified, deployments can\nreference code from this repository by providing only the git reference (branch, tag, or commit).",
@@ -208,7 +208,7 @@
         "name": {
           "description": "The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.\nIt must be unique within the workspace.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.239.0"
+          "sinceVersion": "v0.239.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.AppPermission"
         },
         "resources": {
           "description": "Resources for the app.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource",
-          "since_version": "v0.239.0"
+          "sinceVersion": "v0.239.0"
         },
         "source_code_path": {
           "$ref": "#/$defs/string"
@@ -247,19 +247,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.AppPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -297,42 +297,42 @@
         "apply_policy_default_values": {
           "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "autoscale": {
           "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "autotermination_minutes": {
           "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "aws_attributes": {
           "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "azure_attributes": {
           "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "cluster_log_conf": {
           "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "cluster_name": {
           "description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "custom_tags": {
           "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "data_security_mode": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode"
@@ -343,42 +343,42 @@
         "driver_instance_pool_id": {
           "description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "driver_node_type_id": {
           "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "enable_elastic_disk": {
           "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "enable_local_disk_encryption": {
           "description": "Whether to enable LUKS on cluster VMs' local disks",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "gcp_attributes": {
           "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "init_scripts": {
           "description": "The configuration for storing init scripts. Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "instance_pool_id": {
           "description": "The optional ID of the instance pool to which the cluster belongs.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "is_single_node": {
           "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.237.0"
+          "sinceVersion": "v0.237.0"
         },
         "kind": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind"
@@ -390,12 +390,12 @@
         "node_type_id": {
           "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "num_workers": {
           "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ClusterPermission"
@@ -403,12 +403,12 @@
         },
         "policy_id": {
           "description": "The ID of the cluster policy used to create the cluster if applicable.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "remote_disk_throughput": {
           "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only supported for GCP HYPERDISK_BALANCED disks.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.257.0"
+          "sinceVersion": "v0.257.0"
         },
         "runtime_engine": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine"
@@ -416,37 +416,37 @@
         },
         "single_user_name": {
           "description": "Single user name if data_security_mode is `SINGLE_USER`",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "spark_conf": {
           "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "spark_env_vars": {
           "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "spark_version": {
           "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "ssh_public_keys": {
           "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
           "$ref": "#/$defs/slice/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "total_initial_remote_disk_size": {
           "description": "If set, what the total initial volume size (in GB) of the remote disks should be. Currently only supported for GCP HYPERDISK_BALANCED disks.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.257.0"
+          "sinceVersion": "v0.257.0"
         },
         "use_ml_runtime": {
           "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.237.0"
+          "sinceVersion": "v0.237.0"
         },
         "workload_type": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
@@ -468,19 +468,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ClusterPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -526,12 +526,12 @@
         "dataset_catalog": {
           "description": "Sets the default catalog for all datasets in this dashboard. When set, this overrides the catalog specified in individual dataset definitions.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.281.0"
+          "sinceVersion": "v0.281.0"
         },
         "dataset_schema": {
           "description": "Sets the default schema for all datasets in this dashboard. When set, this overrides the schema specified in individual dataset definitions.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.281.0"
+          "sinceVersion": "v0.281.0"
         },
         "display_name": {
           "description": "The display name of the dashboard.",
@@ -595,19 +595,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DashboardPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -649,12 +649,12 @@
         "database_instance_name": {
           "description": "The name of the DatabaseInstance housing the database.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "database_name": {
           "description": "The name of the database (in a instance) associated with the catalog.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -663,7 +663,7 @@
         "name": {
           "description": "The name of the catalog in UC.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         }
       },
       "additionalProperties": false,
@@ -688,22 +688,22 @@
         "capacity": {
           "description": "The sku of the instance. Valid values are \"CU_1\", \"CU_2\", \"CU_4\", \"CU_8\".",
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "custom_tags": {
           "description": "Custom tags associated with the instance. This field is only included on create and update responses.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/database.CustomTag",
-          "since_version": "v0.273.0"
+          "sinceVersion": "v0.273.0"
         },
         "enable_pg_native_login": {
           "description": "Whether to enable PG native password login on the instance. Defaults to false.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.267.0"
+          "sinceVersion": "v0.267.0"
         },
         "enable_readable_secondaries": {
           "description": "Whether to enable secondaries to serve read-only traffic. Defaults to false.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -712,17 +712,17 @@
         "name": {
           "description": "The name of the instance. This is the unique identifier for the instance.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "node_count": {
           "description": "The number of nodes in the instance, composed of 1 primary and 0 or more secondaries. Defaults to\n1 primary and 0 secondaries. This field is input only, see effective_node_count for the output.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "parent_instance_ref": {
           "description": "The ref of the parent instance. This is only available if the instance is\nchild instance.\nInput: For specifying the parent instance to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.DatabaseInstanceRef",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermission"
@@ -730,17 +730,17 @@
         },
         "retention_window_in_days": {
           "description": "The retention window for the instance. This is the time window in days\nfor which the historical data is retained. The default value is 7 days.\nValid values are 2 to 35 days.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "stopped": {
           "description": "Whether to stop the instance. An input only param, see effective_stopped for the output.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "usage_policy_id": {
           "description": "The desired usage policy to associate with the instance.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.273.0"
+          "sinceVersion": "v0.273.0"
         }
       },
       "additionalProperties": false,
@@ -761,19 +761,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.DatabaseInstancePermissionLevel",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         }
       },
       "additionalProperties": false,
@@ -811,12 +811,12 @@
         "principal": {
           "description": "The name of the principal that will be granted privileges",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "privileges": {
           "description": "The privileges to grant to the specified entity",
           "$ref": "#/$defs/slice/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -839,32 +839,32 @@
         "budget_policy_id": {
           "description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.231.0"
+          "sinceVersion": "v0.231.0"
         },
         "continuous": {
           "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "description": {
           "description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "email_notifications": {
           "description": "An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "environments": {
           "description": "A list of task execution environment specifications that can be referenced by serverless tasks of this job.\nFor serverless notebook tasks, if the environment_key is not specified, the notebook environment will be used if present. If a jobs environment is specified, it will override the notebook environment.\nFor other serverless tasks, the task environment is required to be specified using environment_key in the task settings.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
        },
         "git_source": {
           "description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "health": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules"
@@ -872,7 +872,7 @@
         },
         "job_clusters": {
           "description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -881,27 +881,27 @@
         "max_concurrent_runs": {
           "description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "name": {
           "description": "An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "notification_settings": {
           "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "parameters": {
           "description": "Job-level parameter definitions",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "performance_target": {
           "description": "The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run.\nThe performance target does not apply to tasks that run on Serverless GPU compute.\n\n* `STANDARD`: Enables cost-efficient execution of serverless workloads.\n* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PerformanceTarget",
-          "since_version": "v0.241.0"
+          "sinceVersion": "v0.241.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.JobPermission"
@@ -909,7 +909,7 @@
         },
         "queue": {
           "description": "The queue settings of the job.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "run_as": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
@@ -917,39 +917,39 @@
         },
         "schedule": {
           "description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "tags": {
           "description": "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "tasks": {
           "description": "A list of task specifications to be executed by this job.\nIt supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit).\nRead endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "timeout_seconds": {
           "description": "An optional timeout applied to each run of this job. A value of `0` means no timeout.",
           "$ref": "#/$defs/int",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "trigger": {
           "description": "A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "usage_policy_id": {
           "description": "The id of the user specified usage policy to use for this job.\nIf not specified, a default usage policy may be applied when creating or modifying the job.\nSee `effective_usage_policy_id` for the usage policy used by this workload.",
           "$ref": "#/$defs/string",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "since_version": "v0.265.0"
+          "sinceVersion": "v0.265.0"
         },
         "webhook_notifications": {
           "description": "A collection of system notification IDs to notify when runs of this job begin or complete.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -968,19 +968,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.JobPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -1019,7 +1019,7 @@
         "prevent_destroy": {
           "description": "Lifecycle setting to prevent the resource from being destroyed.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.268.0"
+          "sinceVersion": "v0.268.0"
         }
       },
       "additionalProperties": false
@@ -1038,7 +1038,7 @@
         "artifact_location": {
           "description": "Location where all artifacts for the experiment are stored.\nIf not provided, the remote server will select an appropriate default.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -1047,7 +1047,7 @@
         "name": {
           "description": "Experiment name.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermission"
@@ -1055,7 +1055,7 @@
         },
         "tags": {
           "description": "A collection of tags to set on the experiment. Maximum tag size and number of tags per request\ndepends on the storage backend. All storage backends are guaranteed to support tag keys up\nto 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also\nguaranteed to support up to 20 tags per request.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1077,19 +1077,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperimentPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -1127,7 +1127,7 @@
         "description": {
           "description": "Optional description for registered model.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -1136,7 +1136,7 @@
         "name": {
           "description": "Register models under this name",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.MlflowModelPermission"
@@ -1144,7 +1144,7 @@
         },
         "tags": {
           "description": "Additional metadata for registered model.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1166,19 +1166,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModelPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -1218,17 +1218,17 @@
         "ai_gateway": {
           "description": "The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig",
-          "since_version": "v0.230.0"
+          "sinceVersion": "v0.230.0"
         },
         "budget_policy_id": {
           "description": "The budget policy to be applied to the serving endpoint.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.244.0"
+          "sinceVersion": "v0.244.0"
         },
         "config": {
           "description": "The core config of the serving endpoint.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "description": {
           "$ref": "#/$defs/string"
@@ -1236,7 +1236,7 @@
         },
         "email_notifications": {
           "description": "Email notification settings.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EmailNotifications",
-          "since_version": "v0.264.0"
+          "sinceVersion": "v0.264.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -1245,7 +1245,7 @@
         "name": {
           "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermission"
@@ -1254,18 +1254,18 @@
         },
         "rate_limits": {
           "description": "Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.RateLimit",
           "deprecationMessage": "This field is deprecated",
-          "since_version": "v0.229.0",
+          "sinceVersion": "v0.229.0",
           "deprecated": true
         },
         "route_optimized": {
           "description": "Enable route optimization for the serving endpoint.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "tags": {
           "description": "Tags to be attached to the serving endpoint and automatically propagated to billing logs.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1287,19 +1287,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpointPermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -1337,22 +1337,22 @@
         "group_name": {
           "description": "The name of the group that has the permission set in level.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "level": {
           "description": "The allowed permission for user, group, service principal defined for this permission.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "service_principal_name": {
           "description": "The name of the service principal that has the permission set in level.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "user_name": {
           "description": "The name of the user that has the permission set in level.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1374,84 +1374,84 @@
         "allow_duplicate_names": {
           "description": "If false, deployment will fail if name conflicts with that of another pipeline.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.261.0"
+          "sinceVersion": "v0.261.0"
         },
         "budget_policy_id": {
           "description": "Budget policy of this pipeline.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.230.0"
+          "sinceVersion": "v0.230.0"
         },
         "catalog": {
           "description": "A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "channel": {
           "description": "DLT Release Channel that specifies which version to use.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "clusters": {
           "description": "Cluster settings for this pipeline deployment.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "configuration": {
           "description": "String-String configuration for this pipeline execution.",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "continuous": {
           "description": "Whether the pipeline is continuous or triggered. This replaces `trigger`.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "development": {
           "description": "Whether the pipeline is in Development mode. Defaults to false.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "edition": {
           "description": "Pipeline product edition.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "environment": {
           "description": "Environment specification for this pipeline used to install dependencies.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelinesEnvironment",
-          "since_version": "v0.257.0"
+          "sinceVersion": "v0.257.0"
         },
         "event_log": {
           "description": "Event log configuration for this pipeline",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.EventLogSpec",
-          "since_version": "v0.246.0"
+          "sinceVersion": "v0.246.0"
         },
         "filters": {
           "description": "Filters on which Pipeline packages to include in the deployed graph.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "gateway_definition": {
           "description": "The definition of a gateway pipeline to support change data capture.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "id": {
           "description": "Unique identifier for this pipeline.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "ingestion_definition": {
           "description": "The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "libraries": {
           "description": "Libraries or code needed by this deployment.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -1460,12 +1460,12 @@
         "name": {
           "description": "Friendly identifier for this pipeline.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "notifications": {
           "description": "List of notification settings for this pipeline.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "permissions": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.PipelinePermission"
@@ -1473,19 +1473,19 @@
         },
         "photon": {
           "description": "Whether Photon is enabled for this pipeline.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "restart_window": {
           "description": "Restart window of this pipeline.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "since_version": "v0.234.0"
+          "sinceVersion": "v0.234.0"
         },
         "root_path": {
           "description": "Root path for this pipeline.\nThis is used as the root directory when editing the pipeline in the Databricks user interface and it is\nadded to sys.path when executing Python sources during pipeline execution.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.253.0"
+          "sinceVersion": "v0.253.0"
         },
         "run_as": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RunAs"
@@ -1493,28 +1493,28 @@
         },
         "schema": {
           "description": "The default schema (database) where tables are read from or published to.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.230.0"
+          "sinceVersion": "v0.230.0"
         },
         "serverless": {
           "description": "Whether serverless compute is enabled for this pipeline.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "storage": {
           "description": "DBFS root directory for storing checkpoints and tables.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "tags": {
           "description": "A map of tags associated with the pipeline.\nThese are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations.\nA maximum of 25 tags can be added to the pipeline.",
           "$ref": "#/$defs/map/string",
-          "since_version": "v0.256.0"
+          "sinceVersion": "v0.256.0"
         },
         "target": {
           "description": "Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.",
           "$ref": "#/$defs/string",
           "deprecationMessage": "This field is deprecated",
-          "since_version": "v0.229.0",
+          "sinceVersion": "v0.229.0",
           "deprecated": true
         },
         "trigger": {
@@ -1528,7 +1528,7 @@
           "$ref": "#/$defs/string",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "since_version": "v0.276.0"
+          "sinceVersion": "v0.276.0"
         }
       },
       "additionalProperties": false,
@@ -1547,19 +1547,19 @@
       "properties": {
         "group_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "level": {
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.PipelinePermissionLevel",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "service_principal_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         },
         "user_name": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.247.0"
+          "sinceVersion": "v0.247.0"
         }
       },
       "additionalProperties": false,
@@ -1598,24 +1598,24 @@
         "assets_dir": {
           "description": "[Create:REQ Update:IGN] Field for specifying the absolute path to a custom directory to store data-monitoring\nassets. Normally prepopulated to a default user location via UI and Python APIs.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "baseline_table_name": {
           "description": "[Create:OPT Update:OPT] Baseline table name.\nBaseline data is used to compute drift from the data in the monitored `table_name`.\nThe baseline table and the monitored table shall have the same schema.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "custom_metrics": {
           "description": "[Create:OPT Update:OPT] Custom metrics.",
           "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "data_classification_config": {
           "description": "[Create:OPT Update:OPT] Data classification related config.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig",
           "x-databricks-preview": "PRIVATE",
           "doNotSuggest": true,
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "inference_log": {
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog"
@@ -1623,7 +1623,7 @@
         },
         "latest_monitor_failure_msg": {
           "description": "[Create:ERR Update:IGN] The latest error message for a monitor failure.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.264.0"
+          "sinceVersion": "v0.264.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
@@ -1632,32 +1632,32 @@
         "notifications": {
           "description": "[Create:OPT Update:OPT] Field for specifying notification settings.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "output_schema_name": {
           "description": "[Create:REQ Update:REQ] Schema where output tables are created. Needs to be in 2-level format {catalog}.{schema}",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "schedule": {
           "description": "[Create:OPT Update:OPT] The monitor schedule.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "skip_builtin_dashboard": {
           "description": "Whether to skip creating a default dashboard summarizing data quality metrics.",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "slicing_exprs": {
           "description": "[Create:OPT Update:OPT] List of column expressions to slice data with for targeted analysis. The data is grouped by\neach expression independently, resulting in a separate slice for each predicate and its\ncomplements. For example `slicing_exprs=[“col_1”, “col_2 \u003e 10”]` will generate the following\nslices: two slices for `col_2 \u003e 10` (True and False), and one slice per unique value in\n`col1`. For high-cardinality columns, only the top 100 unique values by frequency will\ngenerate slices.",
           "$ref": "#/$defs/slice/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "snapshot": {
           "description": "Configuration for monitoring snapshot tables.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "table_name": {
           "$ref": "#/$defs/string"
@@ -1665,12 +1665,12 @@
         },
         "time_series": {
           "description": "Configuration for monitoring time series tables.",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "warehouse_id": {
           "description": "Optional argument to specify the warehouse for dashboard creation. If not specified, the first running\nwarehouse will be used.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1701,12 +1701,12 @@
         "catalog_name": {
           "description": "The name of the catalog where the schema and the registered model reside",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "comment": {
           "description": "The comment attached to the registered model",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "created_at": {
           "$ref": "#/$defs/int64"
@@ -1730,7 +1730,7 @@
         },
         "name": {
           "description": "The name of the registered model",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "owner": {
           "$ref": "#/$defs/string"
@@ -1738,12 +1738,12 @@
         },
         "schema_name": {
           "description": "The name of the schema where the registered model resides",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "storage_location": {
           "description": "The storage location on the cloud under which model version data files are stored",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "updated_at": {
           "$ref": "#/$defs/int64"
@@ -1769,12 +1769,12 @@
         "catalog_name": {
           "description": "Name of parent catalog.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "comment": {
           "description": "User-provided free-form text description.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "grants": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrant"
@@ -1786,7 +1786,7 @@
         "name": {
           "description": "Name of schema, relative to parent catalog.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         },
         "properties": {
           "$ref": "#/$defs/map/string"
@@ -1794,7 +1794,7 @@
         },
         "storage_root": {
           "description": "Storage root URL for managed tables within schema.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.229.0"
+          "sinceVersion": "v0.229.0"
         }
       },
       "additionalProperties": false,
@@ -1817,11 +1817,11 @@
       "properties": {
         "principal": {
           "$ref": "#/$defs/string",
-          "since_version": "v0.267.0"
+          "sinceVersion": "v0.267.0"
         },
         "privileges": {
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SchemaGrantPrivilege",
-          "since_version": "v0.267.0"
+          "sinceVersion": "v0.267.0"
         }
       },
       "additionalProperties": false,
@@ -1870,27 +1870,27 @@
         "backend_type": {
           "description": "The backend type the scope will be created with. If not specified, will default to `DATABRICKS`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.ScopeBackendType",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "keyvault_metadata": {
           "description": "The metadata for the secret scope if the `backend_type` is `AZURE_KEYVAULT`",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/workspace.AzureKeyVaultSecretScopeMetadata",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "lifecycle": {
           "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle",
-          "since_version": "v0.268.0"
+          "sinceVersion": "v0.268.0"
         },
         "name": {
           "description": "Scope name requested by the user. Scope names are unique.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "permissions": {
           "description": "The permissions to apply to the secret scope. Permissions are managed via secret scope ACLs.",
           "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SecretScopePermission",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         }
       },
       "additionalProperties": false,
@@ -1912,22 +1912,22 @@
         "group_name": {
           "description": "The name of the group that has the permission set in level. This field translates to a `principal` field in secret scope ACL.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "level": {
           "description": "The allowed permission for user, group, service principal defined for this permission.",
           "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SecretScopePermissionLevel",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "service_principal_name": {
           "description": "The application ID of an active service principal. This field translates to a `principal` field in secret scope ACL.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         },
         "user_name": {
           "description": "The name of the user that has the permission set in level. This field translates to a `principal` field in secret scope ACL.",
           "$ref": "#/$defs/string",
-          "since_version": "v0.252.0"
+          "sinceVersion": "v0.252.0"
         }
       },
       "additionalProperties": false,
@@ -1966,22 +1966,22 @@
         "auto_stop_mins": {
           "description": "The amount of time in minutes that a SQL warehouse must be idle (i.e., no\nRUNNING queries) before it is automatically stopped.\n\nSupported values:\n- Must be == 0 or \u003e= 10 mins\n- 0 indicates no autostop.\n\nDefaults to 120 mins",
           "$ref": "#/$defs/int",
-          "since_version": "v0.260.0"
+          "sinceVersion": "v0.260.0"
         },
         "channel": {
           "description": "Channel Details",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Channel",
-          "since_version": "v0.260.0"
+          "sinceVersion": "v0.260.0"
         },
         "cluster_size": {
           "description": "Size of the clusters allocated for this warehouse.\nIncreasing the size of a spark cluster allows you to run larger queries on\nit. If you want to increase the number of concurrent queries, please tune\nmax_num_clusters.\n\nSupported values:\n- 2X-Small\n- X-Small\n- Small\n- Medium\n- Large\n- X-Large\n- 2X-Large\n- 3X-Large\n- 4X-Large",
           "$ref": "#/$defs/string",
-          "since_version": "v0.260.0"
+          "sinceVersion": "v0.260.0"
         },
         "creator_name": {
           "description": "warehouse creator name",
           "$ref": "#/$defs/string",
-          "since_version": "v0.260.0"
+          "sinceVersion": "v0.260.0"
         },
         "enable_photon": {
           "description": "Configures whether the warehouse should use Photon optimized clusters.\n\nDefaults to true.",
@@ -1990,13 +1990,13 @@
         "enable_serverless_compute": {
           "description": "Configures whether the warehouse should use serverless compute",
           "$ref": "#/$defs/bool",
-          "since_version": "v0.260.0"
+          "sinceVersion": "v0.260.0"
         },
         "instance_profile_arn": {
           "description": "Deprecated. 
Instance profile used to pass IAM role to the cluster", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.260.0", + "sinceVersion": "v0.260.0", "deprecated": true }, "lifecycle": { @@ -2006,17 +2006,17 @@ "max_num_clusters": { "description": "Maximum number of clusters that the autoscaler will create to handle\nconcurrent queries.\n\nSupported values:\n- Must be \u003e= min_num_clusters\n- Must be \u003c= 40.\n\nDefaults to min_clusters if unset.", "$ref": "#/$defs/int", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "min_num_clusters": { "description": "Minimum number of available clusters that will be maintained for this SQL\nwarehouse. Increasing this will ensure that a larger number of clusters are\nalways running and therefore may reduce the cold start time for new\nqueries. This is similar to reserved vs. revocable cores in a resource\nmanager.\n\nSupported values:\n- Must be \u003e 0\n- Must be \u003c= min(max_num_clusters, 30)\n\nDefaults to 1", "$ref": "#/$defs/int", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "name": { "description": "Logical name for the cluster.\n\nSupported values:\n- Must be unique within an org.\n- Must be less than 100 characters.", "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "permissions": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermission" @@ -2027,7 +2027,7 @@ "tags": { "description": "A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated\nwith this SQL warehouse.\n\nSupported values:\n- Number of tags \u003c 45.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.EndpointTags", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "warehouse_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.CreateWarehouseRequestWarehouseType" @@ -2048,19 +2048,19 @@ "properties": { "group_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "level": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.SqlWarehousePermissionLevel", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "service_principal_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "user_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" } }, "additionalProperties": false, @@ -2099,24 +2099,24 @@ "properties": { "database_instance_name": { "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "lifecycle": { "description": "Lifecycle is a struct that contains the lifecycle settings for a resource. 
It controls the behavior of the resource when it is deployed or destroyed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Lifecycle", - "since_version": "v0.268.0" + "sinceVersion": "v0.268.0" }, "logical_database_name": { "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "name": { "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "spec": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSpec", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" } }, "additionalProperties": false, @@ -2138,12 +2138,12 @@ "catalog_name": { "description": "The name of the catalog where the schema and the volume are", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "comment": { "description": "The comment attached to the volume", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "grants": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrant" @@ -2155,17 +2155,17 @@ "name": { "description": "The name of the volume", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "schema_name": { "description": "The name of the schema where the volume is", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "storage_location": { "description": "The storage location on the cloud", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "volume_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.VolumeType" @@ -2192,11 +2192,11 @@ "properties": { "principal": { "$ref": "#/$defs/string", - "since_version": "v0.264.1" + "sinceVersion": "v0.264.1" }, "privileges": { "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.VolumeGrantPrivilege", - "since_version": "v0.264.1" + "sinceVersion": "v0.264.1" } }, "additionalProperties": false, @@ -2237,62 +2237,62 @@ "alert": { "description": "The name of the alert for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster": { "description": "The name of the cluster for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster_policy": { "description": "The name of the cluster_policy for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dashboard": { "description": "The name of the dashboard for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_pool": { "description": "The name of the instance_pool for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "job": { "description": "The name of the job for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "metastore": { "description": "The name of the metastore for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notification_destination": { "description": "The name of the notification_destination for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "pipeline": { "description": "The name of the pipeline for which to 
retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "query": { "description": "The name of the query for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "service_principal": { "description": "The name of the service_principal for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "warehouse": { "description": "The name of the warehouse for which to retrieve an ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2311,22 +2311,22 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "description": { "description": "The description of the variable.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2341,23 +2341,23 @@ "default": { "description": "The default value for the variable.", "$ref": "#/$defs/interface", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "description": { "description": "The description of the variable", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "lookup": { "description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup", "markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "type": { "description": "The type of the variable.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -2375,33 +2375,33 @@ "build": { "description": "An optional set of build commands to run locally before deployment.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dynamic_version": { "description": "Whether to patch the wheel version dynamically based on the timestamp of the whl file. If this is set to `true`, new code can be deployed without having to update the version in `setup.py` or `pyproject.toml`. This setting is only valid when `type` is set to `whl`. See [\\_](/dev-tools/bundles/settings.md#bundle-syntax-mappings-artifacts).", "$ref": "#/$defs/bool", - "since_version": "v0.245.0" + "sinceVersion": "v0.245.0" }, "executable": { "description": "The executable type. 
Valid values are `bash`, `sh`, and `cmd`.", "$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "files": { "description": "The relative or absolute path to the built artifact files.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "path": { "description": "The local path of the directory for the artifact.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "type": { "description": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType", "markdownDescription": "Required if the artifact is a Python wheel. The type of the artifact. Valid values are `whl` and `jar`.", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2420,7 +2420,7 @@ "source": { "description": "Required. The artifact source file.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -2446,40 +2446,40 @@ "description": "The ID of a cluster to use to run the bundle.", "$ref": "#/$defs/string", "markdownDescription": "The ID of a cluster to use to run the bundle. See [cluster_id](https://docs.databricks.com/dev-tools/bundles/settings.html#cluster_id).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "compute_id": { "description": "Deprecated. The ID of the compute to use to run the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "databricks_cli_version": { "description": "The Databricks CLI version to use for the bundle.", "$ref": "#/$defs/string", "markdownDescription": "The Databricks CLI version to use for the bundle. See [databricks_cli_version](https://docs.databricks.com/dev-tools/bundles/settings.html#databricks_cli_version).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "deployment": { "description": "The definition of the bundle deployment", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment", "markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git": { "description": "The Git version control details that are associated with your bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", "markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "The name of the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "uuid": { "description": "Reserved. A Universally Unique Identifier (UUID) for the bundle that uniquely identifies the bundle in internal Databricks systems. This is generated when a bundle project is initialized using a Databricks template (using the `databricks bundle init` command).", "$ref": "#/$defs/string", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" } }, "additionalProperties": false, @@ -2504,12 +2504,12 @@ "fail_on_active_runs": { "description": "Whether to fail on active runs. 
If this is set to true a deployment that is running can be interrupted.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "lock": { "description": "The deployment lock attributes.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2529,38 +2529,38 @@ "description": "The PyDABs configuration.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs", "deprecationMessage": "Deprecated: please use python instead", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "python": { "description": "Configures loading of Python code defined with 'databricks-bundles' package.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", - "since_version": "v0.238.0" + "sinceVersion": "v0.238.0" }, "python_wheel_wrapper": { "description": "Whether to use a Python wheel wrapper.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "scripts": { "description": "The commands to run.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "skip_artifact_cleanup": { "description": "Determines whether to skip cleaning up the .internal folder", "$ref": "#/$defs/bool", - "since_version": "v0.254.0" + "sinceVersion": "v0.254.0" }, "skip_name_prefix_for_schema": { "description": "Skip adding the prefix that is either set in `presets.name_prefix` or computed when `mode: development`\nis set, to the names of UC schemas defined in the bundle.", "$ref": "#/$defs/bool", - "since_version": "v0.255.0" + "sinceVersion": "v0.255.0" }, "use_legacy_run_as": { "description": "Whether to use the legacy run_as behavior.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2580,13 +2580,13 @@ "description": "The Git branch name.", "$ref": "#/$defs/string", "markdownDescription": "The Git branch name. See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "origin_url": { "description": "The origin URL of the repository.", "$ref": "#/$defs/string", "markdownDescription": "The origin URL of the repository. 
See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2605,12 +2605,12 @@ "enabled": { "description": "Whether this lock is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "force": { "description": "Whether to force this lock if it is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2632,37 +2632,37 @@ "artifacts_dynamic_version": { "description": "Whether to enable dynamic_version on all artifacts.", "$ref": "#/$defs/bool", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "jobs_max_concurrent_runs": { "description": "The maximum concurrent runs for a job.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name_prefix": { "description": "The prefix for job runs of the bundle.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pipelines_development": { "description": "Whether pipeline deployments should be locked in development mode.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_linked_deployment": { "description": "Whether to link the deployment to the bundle source.", "$ref": "#/$defs/bool", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" }, "tags": { "description": "The tags for the bundle deployment.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "trigger_pause_status": { "description": "A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2681,7 +2681,7 @@ "enabled": { "description": "Whether or not PyDABs (Private Preview) is enabled", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2700,17 +2700,17 @@ "mutators": { "description": "Mutators contains a list of fully qualified function paths to mutator functions.\n\nExample: [\"my_project.mutators:add_default_cluster\"]", "$ref": "#/$defs/slice/string", - "since_version": "v0.238.0" + "sinceVersion": "v0.238.0" }, "resources": { "description": "Resources contains a list of fully qualified function paths to load resources\ndefined in Python code.\n\nExample: [\"my_project.resources:load_resources\"]", "$ref": "#/$defs/slice/string", - "since_version": "v0.238.0" + "sinceVersion": "v0.238.0" }, "venv_path": { "description": "VEnvPath is path to the virtual environment.\n\nIf enabled, Python code will execute within this environment. If disabled,\nit defaults to using the Python interpreter available in the current shell.", "$ref": "#/$defs/string", - "since_version": "v0.238.0" + "sinceVersion": "v0.238.0" } }, "additionalProperties": false @@ -2728,103 +2728,103 @@ "properties": { "alerts": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Alert", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "apps": { "description": "The app resource defines a Databricks app.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App", "markdownDescription": "The app resource defines a [Databricks app](https://docs.databricks.com/api/workspace/apps/create). 
For information about Databricks Apps, see [link](https://docs.databricks.com/dev-tools/databricks-apps/index.html).", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "clusters": { "description": "The cluster definitions for the bundle, where each key is the name of a cluster.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster", "markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dashboards": { "description": "The dashboard definitions for the bundle, where each key is the name of the dashboard.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard", "markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards).", - "since_version": "v0.232.0" + "sinceVersion": "v0.232.0" }, "database_catalogs": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseCatalog", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" }, "database_instances": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.DatabaseInstance", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" }, "experiments": { "description": "The experiment definitions for the bundle, where each key is the name of the experiment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment", "markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "jobs": { "description": "The job definitions for the bundle, where each key is the name of the job.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job", "markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "model_serving_endpoints": { "description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint", "markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "models": { "description": "The model definitions for the bundle, where each key is the name of the model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel", "markdownDescription": "The model definitions for the bundle, where each key is the name of the model. 
See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pipelines": { "description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline", "markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "quality_monitors": { "description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor", "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "registered_models": { "description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel", "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "schemas": { "description": "The schema definitions for the bundle, where each key is the name of the schema.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema", "markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "secret_scopes": { "description": "The secret scope definitions for the bundle, where each key is the name of the secret scope.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SecretScope", "markdownDescription": "The secret scope definitions for the bundle, where each key is the name of the secret scope. See [secret_scopes](https://docs.databricks.com/dev-tools/bundles/resources.html#secret_scopes).", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "sql_warehouses": { "description": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SqlWarehouse", "markdownDescription": "The SQL warehouse definitions for the bundle, where each key is the name of the warehouse. 
See [sql_warehouses](https://docs.databricks.com/dev-tools/bundles/resources.html#sql_warehouses).", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "synced_database_tables": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.SyncedDatabaseTable", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "volumes": { "description": "The volume definitions for the bundle, where each key is the name of the volume.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume", "markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes).", - "since_version": "v0.236.0" + "sinceVersion": "v0.236.0" } }, "additionalProperties": false @@ -2842,7 +2842,7 @@ "properties": { "content": { "$ref": "#/$defs/string", - "since_version": "v0.259.0" + "sinceVersion": "v0.259.0" } }, "additionalProperties": false, @@ -2864,17 +2864,17 @@ "exclude": { "description": "A list of files or folders to exclude from the bundle.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "include": { "description": "A list of files or folders to include in the bundle.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "paths": { "description": "The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2893,76 +2893,76 @@ "artifacts": { "description": "The artifacts to include in the target deployment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster_id": { "description": "The ID of the cluster to use for this target.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "compute_id": { "description": "Deprecated. The ID of the compute to use for this target.", "$ref": "#/$defs/string", "deprecationMessage": "Deprecated: please use cluster_id instead", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "default": { "description": "Whether this target is the default target.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git": { "description": "The Git version control settings for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "mode": { "description": "The deployment mode for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", "markdownDescription": "The deployment mode for the target. Valid values are `development` or `production`. 
See [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "permissions": { "description": "The permissions for deploying and running the bundle in the target.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "presets": { "description": "The deployment presets for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "resources": { "description": "The resource definitions for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "run_as": { "description": "The identity to use to run the bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "sync": { "description": "The local paths to sync to the target workspace when a bundle is run or deployed.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "variables": { "description": "The custom variable definitions for the target.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workspace": { "description": "The Databricks workspace for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -2981,82 +2981,82 @@ "artifact_path": { "description": "The artifact path to use within the workspace for both deployments and workflow runs", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "auth_type": { "description": "The authentication type.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_client_id": { "description": "The Azure client ID", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_environment": { "description": "The Azure environment", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_login_app_id": { "description": "The Azure login app ID", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_tenant_id": { "description": "The Azure tenant ID", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_use_msi": { "description": "Whether to use MSI for Azure", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_workspace_resource_id": { "description": "The Azure workspace resource ID", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "client_id": { "description": "The client ID for the workspace", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "file_path": { "description": "The file path to use within the workspace for both deployments and workflow runs", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "google_service_account": { "description": "The 
Google service account name", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "host": { "description": "The Databricks workspace host URL", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "profile": { "description": "The Databricks workspace profile name", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "resource_path": { "description": "The workspace resource path", "$ref": "#/$defs/string", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "root_path": { "description": "The Databricks workspace root path", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "state_path": { "description": "The workspace state path", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -3087,7 +3087,7 @@ }, "deployment_id": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "env_vars": { "description": "The environment variables to set in the app runtime environment. This will override the environment variables specified in the app.yaml file.", @@ -3101,11 +3101,11 @@ }, "mode": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "source_code_path": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false @@ -3123,7 +3123,7 @@ "properties": { "source_code_path": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false @@ -3185,44 +3185,44 @@ "properties": { "database": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabase", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "description": { "description": "Description of the App Resource.", "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "experiment": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceExperiment" }, "genie_space": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpace", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" }, "job": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "name": { "description": "Name of the App Resource.", "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "secret": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "serving_endpoint": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "sql_warehouse": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "uc_securable": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurable", - "since_version": "v0.253.0" + "sinceVersion": "v0.253.0" } }, "additionalProperties": false, @@ -3243,15 +3243,15 @@ "properties": { "database_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" 
}, "instance_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceDatabaseDatabasePermission", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" } }, "additionalProperties": false, @@ -3328,15 +3328,15 @@ "properties": { "name": { "$ref": "#/$defs/string", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceGenieSpaceGenieSpacePermission", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" }, "space_id": { "$ref": "#/$defs/string", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" } }, "additionalProperties": false, @@ -3376,11 +3376,11 @@ "properties": { "id": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false, @@ -3419,15 +3419,15 @@ "properties": { "key": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "scope": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false, @@ -3467,11 +3467,11 @@ "properties": { "name": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false, @@ -3509,11 +3509,11 @@ "properties": { "id": { "$ref": "#/$defs/string", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" }, "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission", - "since_version": "v0.239.0" + "sinceVersion": "v0.239.0" } }, "additionalProperties": false, @@ -3551,15 +3551,15 @@ "properties": { "permission": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurablePermission", - "since_version": "v0.253.0" + "sinceVersion": "v0.253.0" }, "securable_full_name": { "$ref": "#/$defs/string", - "since_version": "v0.253.0" + "sinceVersion": "v0.253.0" }, "securable_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceUcSecurableUcSecurableType", - "since_version": "v0.253.0" + "sinceVersion": "v0.253.0" } }, "additionalProperties": false, @@ -3778,17 +3778,17 @@ "pause_status": { "description": "Read only field that indicates whether a schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "quartz_cron_expression": { "description": "The expression that determines when to run the monitor. 
See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timezone_id": { "description": "The timezone id (e.g., ``PST``) in which to evaluate the quartz expression.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -3829,7 +3829,7 @@ "enabled": { "description": "Whether to enable data classification.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -3848,7 +3848,7 @@ "email_addresses": { "description": "The list of email addresses to send the notification to. A maximum of 5 email addresses is supported.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -3867,37 +3867,37 @@ "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "label_col": { "description": "Column for the label.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "model_id_col": { "description": "Column for the model identifier.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "prediction_col": { "description": "Column for the prediction.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "prediction_proba_col": { "description": "Column for prediction probabilities", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "problem_type": { "description": "Problem type the model aims to solve.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timestamp_col": { "description": "Column for the timestamp.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -3939,27 +3939,27 @@ "definition": { "description": "Jinja template for a SQL expression that specifies how to compute the metric. 
See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "input_columns": { "description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "Name of the metric in the output tables.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "output_data_type": { "description": "The output type of the custom metric.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "type": { "description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4002,14 +4002,14 @@ "on_failure": { "description": "Destinations to send notifications on failure/timeout.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_new_classification_tag_detected": { "description": "Destinations to send notifications on new classification tag detected.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4042,12 +4042,12 @@ "granularities": { "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timestamp_col": { "description": "Column for the timestamp.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4070,28 +4070,28 @@ "alias_name": { "description": "Name of the alias, e.g. 
'champion' or 'latest_stable'",
        "$ref": "#/$defs/string",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      },
      "catalog_name": {
        "$ref": "#/$defs/string",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      },
      "id": {
        "$ref": "#/$defs/string",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      },
      "model_name": {
        "$ref": "#/$defs/string",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      },
      "schema_name": {
        "$ref": "#/$defs/string",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      },
      "version_num": {
        "description": "Integer version number of the model version to which this alias points.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.273.0"
+        "sinceVersion": "v0.273.0"
      }
    },
    "additionalProperties": false
@@ -4126,7 +4126,7 @@
      "destination": {
        "description": "abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.",
        "$ref": "#/$defs/string",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      }
    },
    "additionalProperties": false,
@@ -4148,12 +4148,12 @@
      "max_workers": {
        "description": "The maximum number of workers to which the cluster can scale up when overloaded.\nNote that `max_workers` must be strictly greater than `min_workers`.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "min_workers": {
        "description": "The minimum number of workers to which the cluster can scale down when underutilized.\nIt is also the initial number of workers the cluster will have after creation.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      }
    },
    "additionalProperties": false
@@ -4172,51 +4172,51 @@
    "properties": {
      "availability": {
        "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "ebs_volume_count": {
        "description": "The number of volumes launched for each instance. Users can choose up to 10 volumes.\nThis feature is only enabled for supported node types. Legacy node types cannot specify\ncustom EBS volumes.\nFor node types with no instance store, at least one EBS volume needs to be specified;\notherwise, cluster creation will fail.\n\nThese EBS volumes will be mounted at `/ebs0`, `/ebs1`, and etc.\nInstance store volumes will be mounted at `/local_disk0`, `/local_disk1`, and etc.\n\nIf EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for\nscratch storage because heterogeneously sized scratch devices can lead to inefficient disk\nutilization. If no EBS volumes are attached, Databricks will configure Spark to use instance\nstore volumes.\n\nPlease note that if EBS volumes are specified, then the Spark configuration `spark.local.dir`\nwill be overridden.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "ebs_volume_iops": {
        "description": "If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "ebs_volume_size": {
        "description": "The size of each EBS volume (in GiB) launched for each instance. For general purpose\nSSD, this value must be within the range 100 - 4096. 
For throughput optimized HDD,\nthis value must be within the range 500 - 4096.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "ebs_volume_throughput": {
        "description": "If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "ebs_volume_type": {
        "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "first_on_demand": {
        "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nIf this value is greater than 0, the cluster driver node in particular will be placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "instance_profile_arn": {
        "description": "Nodes for this cluster will only be placed on AWS instances with this instance profile. If\nomitted, nodes will be placed on instances without an IAM instance profile. The instance\nprofile must have previously been added to the Databricks environment by an account\nadministrator.\n\nThis feature may only be available to certain customer plans.",
        "$ref": "#/$defs/string",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "spot_bid_price_percent": {
        "description": "The bid price for AWS spot instances, as a percentage of the corresponding instance type's\non-demand price.\nFor example, if this field is set to 50, and the cluster needs a new `r3.xlarge` spot\ninstance, then the bid price is half of the price of\non-demand `r3.xlarge` instances. Similarly, if this field is set to 200, the bid price is twice\nthe price of on-demand `r3.xlarge` instances. If not specified, the default value is 100.\nWhen spot instances are requested for this cluster, only spot instances whose bid price\npercentage matches this field will be considered.\nNote that, for safety, we enforce this field to be no more than 10000.",
        "$ref": "#/$defs/int",
-        "since_version": "v0.229.0"
+        "sinceVersion": "v0.229.0"
      },
      "zone_id": {
        "description": "Identifier for the availability zone/datacenter in which the cluster resides.\nThis string will be of a form like \"us-west-2a\". The provided availability\nzone must be in the same region as the Databricks deployment. 
For example, \"us-west-2a\"\nis not a valid zone id if the Databricks deployment resides in the \"us-east-1\" region.\nThis is an optional field at cluster creation, and if not specified, the zone \"auto\" will be used.\nIf the zone specified is \"auto\", will try to place cluster in a zone with high availability,\nand will retry placement in a different AZ if there is not enough capacity.\n\nThe list of available zones as well as the default value can be found by using the\n`List Zones` method.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4252,22 +4252,22 @@ "properties": { "availability": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "log_analytics_info": { "description": "Defines values necessary to configure and run Azure Log Analytics agent", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spot_bid_max_price": { "description": "The max bid price to be used for Azure spot instances.\nThe Max price for the bid cannot be higher than the on-demand price of the instance.\nIf not specified, the default value is -1, which specifies that the instance cannot be evicted\non the basis of price, and only on the basis of availability. Further, the value should \u003e 0 or -1.", "$ref": "#/$defs/float64", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4303,12 +4303,12 @@ "jobs": { "description": "With jobs set, the cluster can be used for jobs", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notebooks": { "description": "With notebooks set, this cluster can be used for notebooks", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4328,17 +4328,17 @@ "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "s3": { "description": "destination and either the region or endpoint need to be provided. 
e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "volumes": { "description": "destination needs to be provided, e.g.\n`{ \"volumes\": { \"destination\": \"/Volumes/catalog/schema/volume/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", - "since_version": "v0.242.0" + "sinceVersion": "v0.242.0" } }, "additionalProperties": false @@ -4358,157 +4358,157 @@ "apply_policy_default_values": { "description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "autotermination_minutes": { "description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nThree kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster_name": { "description": "Cluster name requested by the user. 
This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\nFor job clusters, the cluster name is automatically set based on the job and job run IDs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "data_security_mode": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "docker_image": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n\nThis field, along with node_type_id, should not be set if virtual_cluster_size is set.\nIf both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "enable_elastic_disk": { "description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "enable_local_disk_encryption": { "description": "Whether to enable LUKS on cluster VMs' local disks", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "init_scripts": { "description": "The configuration for storing init scripts. 
Any number of destinations can be specified.\nThe scripts are executed sequentially in the order provided.\nIf `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "is_single_node": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\nWhen set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`", "$ref": "#/$defs/bool", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "kind": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Kind", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "remote_disk_throughput": { "description": "If set, what the configurable throughput (in Mb/s) for the remote disk is. 
Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" }, "runtime_engine": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "single_user_name": { "description": "Single user name if data_security_mode is `SINGLE_USER`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_version": { "description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "total_initial_remote_disk_size": { "description": "If set, what the total initial volume size (in GB) of the remote disks should be. Currently only supported for GCP HYPERDISK_BALANCED disks.", "$ref": "#/$defs/int", - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" }, "use_ml_runtime": { "description": "This field can only be used when `kind = CLASSIC_PREVIEW`.\n\n`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.", "$ref": "#/$defs/bool", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "workload_type": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4552,7 +4552,7 @@ "destination": { "description": "dbfs destination, e.g. 
`dbfs:/my/path`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4574,12 +4574,12 @@ "password": { "description": "Password of the user", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "username": { "description": "Name of the user", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4597,12 +4597,12 @@ "properties": { "basic_auth": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "url": { "description": "URL of the docker image.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4639,22 +4639,22 @@ "description": "Use `environment_version` instead.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "environment_version": { "description": "Required. Environment version used by the environment.\nEach version comes with a specific Python version and a set of Python packages.\nThe version is a string, consisting of an integer.", "$ref": "#/$defs/string", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "java_dependencies": { "$ref": "#/$defs/slice/string", - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" } }, "additionalProperties": false @@ -4673,39 +4673,39 @@ "properties": { "availability": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "boot_disk_size": { "description": "Boot disk size in GB", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "first_on_demand": { "description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.", "$ref": "#/$defs/int", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" }, "google_service_account": { "description": "If provided, the cluster will impersonate the google service account when accessing\ngcloud services (like GCS). 
The google service account\nmust have previously been added to the Databricks environment by an account\nadministrator.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "local_ssd_count": { "description": "If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached.\nEach local SSD is 375GB in size.\nRefer to [GCP documentation](https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds)\nfor the supported number of local SSDs for each instance type.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "use_preemptible_executors": { "description": "This field determines whether the spark executors will be scheduled to run on preemptible\nVMs (when set to true) versus standard compute engine VMs (when set to false; default).\nNote: Soon to be deprecated, use the 'availability' field instead.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "zone_id": { "description": "Identifier for the availability zone in which the cluster resides.\nThis can be one of the following:\n- \"HA\" =\u003e High availability, spread nodes across availability zones for a Databricks deployment region [default].\n- \"AUTO\" =\u003e Databricks picks an availability zone to schedule the cluster on.\n- A GCP availability zone =\u003e Pick One of the available zones for (machine type + region) from\nhttps://cloud.google.com/compute/docs/regions-zones.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4742,7 +4742,7 @@ "destination": { "description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4765,39 +4765,39 @@ "abfss": { "description": "Contains the Azure Data Lake Storage destination path", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dbfs": { "description": "destination needs to be provided. e.g.\n`{ \"dbfs\": { \"destination\" : \"dbfs:/home/cluster_log\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "file": { "description": "destination needs to be provided, e.g.\n`{ \"file\": { \"destination\": \"file:/my/local/file.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gcs": { "description": "destination needs to be provided, e.g.\n`{ \"gcs\": { \"destination\": \"gs://my-bucket/file.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "s3": { "description": "destination and either the region or endpoint need to be provided. 
e.g.\n`{ \\\"s3\\\": { \\\"destination\\\": \\\"s3://cluster_log_bucket/prefix\\\", \\\"region\\\": \\\"us-west-2\\\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "volumes": { "description": "destination needs to be provided. e.g.\n`{ \\\"volumes\\\" : { \\\"destination\\\" : \\\"/Volumes/my-init.sh\\\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workspace": { "description": "destination needs to be provided, e.g.\n`{ \"workspace\": { \"destination\": \"/cluster-init-scripts/setup-datadog.sh\" } }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4830,39 +4830,39 @@ "cran": { "description": "Specification of a CRAN library to be installed as part of the library", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "egg": { "description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "jar": { "description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "maven": { "description": "Specification of a maven library to be installed. For example:\n`{ \"coordinates\": \"org.jsoup:jsoup:1.7.2\" }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pypi": { "description": "Specification of a PyPi library to be installed. For example:\n`{ \"package\": \"simplejson\" }`", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "requirements": { "description": "URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported.\nFor example: `{ \"requirements\": \"/Workspace/path/to/requirements.txt\" }` or `{ \"requirements\" : \"/Volumes/path/to/requirements.txt\" }`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "whl": { "description": "URI of the wheel library to install. 
Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"whl\": \"/Workspace/path/to/library.whl\" }`, `{ \"whl\" : \"/Volumes/path/to/library.whl\" }` or\n`{ \"whl\": \"s3://my-bucket/library.whl\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4881,7 +4881,7 @@ "destination": { "description": "local file destination, e.g. `file:/my/local/file.sh`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4903,12 +4903,12 @@ "log_analytics_primary_key": { "description": "The primary key for the Azure Log Analytics agent configuration", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "log_analytics_workspace_id": { "description": "The workspace ID for the Azure Log Analytics agent configuration", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -4927,17 +4927,17 @@ "coordinates": { "description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "exclusions": { "description": "List of dependences to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`.\n\nMaven dependency exclusions:\nhttps://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "repo": { "description": "Maven repo to install the Maven package from. If omitted, both Maven Central Repository\nand Spark Packages are searched.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4959,12 +4959,12 @@ "package": { "description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "repo": { "description": "The repository where the package can be found. If not specified, the default pip index is\nused.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -4986,12 +4986,12 @@ "package": { "description": "The name of the CRAN package to install.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "repo": { "description": "The repository where the package can be found. If not specified, the default CRAN repo is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5030,37 +5030,37 @@ "canned_acl": { "description": "(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`.\nIf `canned_cal` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on\nthe destination bucket and prefix. The full list of possible canned acl can be found at\nhttp://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl.\nPlease also note that by default only the object owner gets full controls. 
If you are using cross account\nrole for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to\nread the logs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "destination": { "description": "S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "enable_encryption": { "description": "(Optional) Flag to enable server side encryption, `false` by default.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "encryption_type": { "description": "(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when\nencryption is enabled and the default type is `sse-s3`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "endpoint": { "description": "S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set.\nIf both are set, endpoint will be used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "kms_key": { "description": "(Optional) Kms key which will be used if encryption is enabled and encryption type is set to `sse-kms`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "region": { "description": "S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set,\nendpoint will be used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5083,7 +5083,7 @@ "destination": { "description": "UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`\nor `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5106,7 +5106,7 @@ "clients": { "description": "defined what type of clients can use the cluster. E.g. Notebooks, Jobs", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5129,7 +5129,7 @@ "destination": { "description": "wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5166,12 +5166,12 @@ "key": { "description": "The key of the custom tag.", "$ref": "#/$defs/string", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" }, "value": { "description": "The value of the custom tag.", "$ref": "#/$defs/string", - "since_version": "v0.273.0" + "sinceVersion": "v0.273.0" } }, "additionalProperties": false @@ -5191,17 +5191,17 @@ "branch_time": { "description": "Branch time of the ref database instance.\nFor a parent ref instance, this is the point in time on the parent instance from which the\ninstance was created.\nFor a child ref instance, this is the point in time on the instance from which the child\ninstance was created.\nInput: For specifying the point in time to create a child instance. 
Optional.\nOutput: Only populated if provided as input to create a child instance.", "$ref": "#/$defs/string", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" }, "lsn": { "description": "User-specified WAL LSN of the ref database instance.\n\nInput: For specifying the WAL LSN to create a child instance. Optional.\nOutput: Only populated if provided as input to create a child instance.", "$ref": "#/$defs/string", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" }, "name": { "description": "Name of the ref database instance.", "$ref": "#/$defs/string", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" } }, "additionalProperties": false @@ -5252,17 +5252,17 @@ "budget_policy_id": { "description": "Budget policy to set on the newly created pipeline.", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "storage_catalog": { "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC catalog for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be a standard catalog where the user has permissions to create Delta tables.", "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "storage_schema": { "description": "This field needs to be specified if the destination catalog is a managed postgres catalog.\n\nUC schema for the pipeline to store intermediate files (checkpoints, event logs etc).\nThis needs to be in the standard catalog where the user has permissions to create Delta tables.", "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" } }, "additionalProperties": false @@ -5397,37 +5397,37 @@ "create_database_objects_if_missing": { "description": "If true, the synced table's logical database and schema resources in PG\nwill be created if they do not already exist.", "$ref": "#/$defs/bool", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "existing_pipeline_id": { "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf existing_pipeline_id is defined, the synced table will be bin packed into the existing pipeline\nreferenced. This avoids creating a new pipeline and allows sharing existing compute.\nIn this case, the scheduling_policy of this synced table must match the scheduling policy of the existing pipeline.", "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "new_pipeline_spec": { "description": "At most one of existing_pipeline_id and new_pipeline_spec should be defined.\n\nIf new_pipeline_spec is defined, a new pipeline is created for this synced table. The location pointed to is used\nto store intermediate files (checkpoints, event logs etc). The caller must have write permissions to create Delta\ntables in the specified catalog and schema. 
Again, note this requires write permissions, whereas the source table\nonly requires read permissions.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.NewPipelineSpec", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "primary_key_columns": { "description": "Primary Key columns to be used for data insert/update in the destination.", "$ref": "#/$defs/slice/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "scheduling_policy": { "description": "Scheduling policy of the underlying pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableSchedulingPolicy", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "source_table_full_name": { "description": "Three-part (catalog, schema, table) name of the source Delta table.", "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "timeseries_key": { "description": "Time series key to deduplicate (tie-break) rows with the same primary key.", "$ref": "#/$defs/string", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" } }, "additionalProperties": false @@ -5472,22 +5472,22 @@ "continuous_update_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE\nor the SYNCED_UPDATING_PIPELINE_RESOURCES state.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableContinuousUpdateStatus", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "failed_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the\nSYNCED_PIPELINE_FAILED state.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableFailedStatus", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "provisioning_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the\nPROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableProvisioningStatus", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" }, "triggered_update_status": { "description": "Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE\nor the SYNCED_NO_PENDING_UPDATE state.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/database.SyncedTableTriggeredUpdateStatus", - "since_version": "v0.266.0" + "sinceVersion": "v0.266.0" } }, "additionalProperties": false @@ -5535,22 +5535,22 @@ "clean_room_name": { "description": "The clean room that the notebook belongs to.", "$ref": "#/$defs/string", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "etag": { "description": "Checksum to validate the freshness of the notebook resource (i.e. 
the notebook being run is the latest version).\nIt can be fetched by calling the :method:cleanroomassets/get API.", "$ref": "#/$defs/string", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "notebook_base_parameters": { "description": "Base parameters to be used for the clean room notebook job.", "$ref": "#/$defs/map/string", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "notebook_name": { "description": "Name of the notebook being run.", "$ref": "#/$defs/string", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" } }, "additionalProperties": false, @@ -5573,17 +5573,17 @@ "gpu_node_pool_id": { "description": "IDof the GPU pool to use.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "gpu_type": { "description": "GPU type.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "num_gpus": { "description": "Number of GPUs.", "$ref": "#/$defs/int", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" } }, "additionalProperties": false, @@ -5620,17 +5620,17 @@ "left": { "description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "op": { "description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "right": { "description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5674,12 +5674,12 @@ "pause_status": { "description": "Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "task_retry_mode": { "description": "Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskRetryMode", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" } }, "additionalProperties": false @@ -5698,17 +5698,17 @@ "pause_status": { "description": "Indicate whether this schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "quartz_cron_expression": { "description": "A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details. 
This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timezone_id": { "description": "A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5731,16 +5731,16 @@ "properties": { "dashboard_id": { "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "subscription": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Subscription", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "warehouse_id": { "description": "Optional: The warehouse id to execute the dashboard with for the schedule.\nIf not specified, the default warehouse of the dashboard will be used.", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -5760,12 +5760,12 @@ "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt Cloud for this task", "$ref": "#/$defs/string", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "dbt_cloud_job_id": { "description": "Id of the dbt Cloud job to be triggered", "$ref": "#/$defs/int64", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" } }, "additionalProperties": false @@ -5784,12 +5784,12 @@ "connection_resource_name": { "description": "The resource name of the UC connection that authenticates the dbt platform for this task", "$ref": "#/$defs/string", - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" }, "dbt_platform_job_id": { "description": "Id of the dbt platform job to be triggered. Specified as a string for maximum compatibility with clients.", "$ref": "#/$defs/string", - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" } }, "additionalProperties": false @@ -5808,37 +5808,37 @@ "catalog": { "description": "Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks \u003e= 1.1.1.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "commands": { "description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "profiles_directory": { "description": "Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "project_directory": { "description": "Path to the project directory. Optional for Git sourced tasks, in which\ncase if no value is provided, the root of the Git repository is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "schema": { "description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided. 
If not provided, the `default` schema is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source": { "description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in Databricks workspace.\n* `GIT`: Project is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "warehouse_id": { "description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5860,17 +5860,17 @@ "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "url": { "description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.\nThis makes it possible to wait for a batch of incoming files to arrive before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5892,17 +5892,17 @@ "concurrency": { "description": "An optional maximum allowed number of concurrent runs of the task.\nSet this value if you want to be able to execute multiple runs of the task concurrently.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "inputs": { "description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "task": { "description": "Configuration for the task that will be run for each element in the array", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -5940,41 +5940,41 @@ "command": { "description": "Command launcher to run the actual script, e.g. bash, python etc.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "compute": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ComputeConfig", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "dl_runtime_image": { "description": "Runtime image", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "mlflow_experiment_name": { "description": "Optional string containing the name of the MLflow experiment to log the run to. 
If name is not\nfound, backend will create the mlflow experiment using the name.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "source": { "description": "Optional location type of the training script. When set to `WORKSPACE`, the script will be retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Script is located in Databricks workspace.\n* `GIT`: Script is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "training_script_path": { "description": "The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "yaml_parameters": { "description": "Optional string containing model parameters passed to the training script in yaml format.\nIf present, then the content in yaml_parameters_file_path will be ignored.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "yaml_parameters_file_path": { "description": "Optional path to a YAML file containing model parameters passed to the training script.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" } }, "additionalProperties": false, @@ -6018,7 +6018,7 @@ "used_commit": { "description": "Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -6038,27 +6038,27 @@ "git_branch": { "description": "Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git_commit": { "description": "Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git_provider": { "description": "Unique identifier of the service used to host the Git repository. The value is case insensitive.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git_tag": { "description": "Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "git_url": { "description": "URL of the repository to be cloned by this job.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6081,12 +6081,12 @@ "job_cluster_key": { "description": "A unique name for the job cluster. 
This field is required and must be unique within the job.\n`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "new_cluster": { "description": "If new_cluster, a description of a cluster that is created for each task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6109,12 +6109,12 @@ "kind": { "description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "metadata_file_path": { "description": "Path of the file that contains deployment metadata.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6168,33 +6168,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -6213,11 +6213,11 @@ "environment_key": { "description": "The key of an environment. It has to be unique within a job.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spec": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6239,12 +6239,12 @@ "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -6263,12 +6263,12 @@ "default": { "description": "Default value of the parameter.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6321,17 +6321,17 @@ "dirty_state": { "description": "Dirty state indicates the job is not fully synced with the job specification in the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. 
Import the remote job specification again from UI to make the job fully synced.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "import_from_git_branch": { "description": "Name of the branch which the job is imported from.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "job_config_path": { "description": "Path of the job YAML file that contains the job specification.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6403,16 +6403,16 @@ "properties": { "metric": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "op": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "value": { "description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.", "$ref": "#/$defs/int64", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6436,7 +6436,7 @@ "properties": { "rules": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -6455,27 +6455,27 @@ "aliases": { "description": "Aliases of the model versions to monitor. Can only be used in conjunction with condition MODEL_ALIAS_SET.", "$ref": "#/$defs/slice/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "condition": { "description": "The condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfigurationCondition", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "securable_name": { "description": "Name of the securable to monitor (\"mycatalog.myschema.mymodel\" in the case of model-level triggers,\n\"mycatalog.myschema\" in the case of schema-level triggers) or empty in the case of metastore-level triggers.", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no model updates have occurred for the specified time\nand can be used to wait for a series of model updates before triggering a run. The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false, @@ -6513,22 +6513,22 @@ "base_parameters": { "description": "Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run\nNow with parameters specified, the two parameters maps are merged. 
If the same key is specified in\n`base_parameters` and in `run-now`, the value from `run-now` is used.\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nIf the notebook takes a parameter that is not specified in the job’s `base_parameters` or the `run-now` override parameters,\nthe default value from the notebook is used.\n\nRetrieve these parameters in a notebook using [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets).\n\nThe JSON representation of this field cannot exceed 1MB.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notebook_path": { "description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source": { "description": "Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Notebook is located in Databricks workspace.\n* `GIT`: Notebook is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "warehouse_id": { "description": "Optional `warehouse_id` to run the notebook on a SQL warehouse. 
Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.\n\nNote that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6581,12 +6581,12 @@ "interval": { "description": "The interval at which the trigger should run.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "unit": { "description": "The unit of time for the interval.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6625,7 +6625,7 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -6644,12 +6644,12 @@ "full_refresh": { "description": "If true, triggers a full refresh on the delta live table.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pipeline_id": { "description": "The full name of the pipeline task to execute.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6671,27 +6671,27 @@ "authentication_method": { "description": "How the published Power BI model authenticates to Databricks", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.AuthenticationMethod", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "model_name": { "description": "The name of the Power BI model", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "overwrite_existing": { "description": "Whether to overwrite existing Power BI models", "$ref": "#/$defs/bool", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "storage_mode": { "description": "The default storage mode of the Power BI model", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "workspace_name": { "description": "The name of the Power BI workspace of the model", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -6710,22 +6710,22 @@ "catalog": { "description": "The catalog name in Databricks", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "name": { "description": "The table name in Databricks", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "schema": { "description": "The schema name in Databricks", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "storage_mode": { "description": "The Power BI storage mode of the table", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.StorageMode", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -6744,27 +6744,27 @@ "connection_resource_name": { "description": "The resource name of the UC connection to authenticate from Databricks to Power BI", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "power_bi_model": { "description": "The semantic model to update", "$ref": 
"#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiModel", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "refresh_after_update": { "description": "Whether the model should be refreshed after the update", "$ref": "#/$defs/bool", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "tables": { "description": "The tables to be exported to Power BI", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTable", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "warehouse_id": { "description": "The SQL warehouse ID to use as the Power BI data source", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -6783,22 +6783,22 @@ "entry_point": { "description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "named_parameters": { "description": "Command-line parameters passed to Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "package_name": { "description": "Name of the package to execute", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "parameters": { "description": "Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6821,7 +6821,7 @@ "enabled": { "description": "If true, enable queueing for the job. 
This is a required field.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -6866,7 +6866,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "jar_params": { @@ -6875,18 +6875,18 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "job_id": { "description": "ID of the job to trigger.", "$ref": "#/$defs/int64", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "job_parameters": { "description": "Job-level parameters used to trigger the job.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notebook_params": { "description": "A map from keys to values for jobs with notebook task, for example `\"notebook_params\": {\"name\": \"john doe\", \"age\": \"35\"}`.\nThe map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function.\n\nIf not specified upon `run-now`, the triggered run uses the job’s base parameters.\n\nnotebook_params cannot be specified in conjunction with jar_params.\n\n⚠ **Deprecation note** Use [job parameters](https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown) to pass information down to tasks.\n\nThe JSON representation of this field (for example `{\"notebook_params\":{\"name\":\"john doe\",\"age\":\"35\"}}`) cannot exceed 10,000 bytes.", @@ -6894,20 +6894,20 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "pipeline_params": { "description": "Controls whether the pipeline should perform a full refresh", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "python_named_params": { "$ref": "#/$defs/map/string", "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "python_params": { @@ -6916,7 +6916,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "spark_submit_params": { @@ -6925,7 +6925,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "sql_params": { @@ -6934,7 +6934,7 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true } }, @@ -6974,24 +6974,24 @@ "description": "Deprecated since 04/2016. For classic compute, provide a `jar` through the `libraries` field instead. 
For serverless compute, provide a `jar` through the `java_dependencies` field inside the `environments` list.\n\nSee the examples of classic and serverless compute usage at the top of the page.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "main_class_name": { "description": "The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library.\n\nThe code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "parameters": { "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "run_as_repl": { "description": "Deprecated. A value of `false` is no longer supported.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.240.0", + "sinceVersion": "v0.240.0", "deprecated": true } }, @@ -7011,17 +7011,17 @@ "parameters": { "description": "Command line parameters passed to the Python file.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "python_file": { "description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For Python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source": { "description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local\nDatabricks workspace or cloud location (if the `python_file` has a URI format). 
When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7043,7 +7043,7 @@ "parameters": { "description": "Command-line parameters passed to spark submit.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7062,32 +7062,32 @@ "alert": { "description": "If alert, indicates that this job must refresh a SQL alert.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dashboard": { "description": "If dashboard, indicates that this job must refresh a SQL dashboard.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "file": { "description": "If file, indicates that this job runs a SQL file in a remote Git repository.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "parameters": { "description": "Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "query": { "description": "If query, indicates that this job must execute a SQL query.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "warehouse_id": { "description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. 
Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7109,17 +7109,17 @@ "alert_id": { "description": "The canonical identifier of the SQL alert.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pause_subscriptions": { "description": "If true, the alert notifications are not sent to subscribers.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "subscriptions": { "description": "If specified, alert notifications are sent to subscribers.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7141,22 +7141,22 @@ "custom_subject": { "description": "Subject of the email sent to subscribers of this task.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dashboard_id": { "description": "The canonical identifier of the SQL dashboard.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pause_subscriptions": { "description": "If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "subscriptions": { "description": "If specified, dashboard snapshots are sent to subscriptions.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7178,12 +7178,12 @@ "path": { "description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source": { "description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7205,7 +7205,7 @@ "query_id": { "description": "The canonical identifier of the SQL query.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7227,12 +7227,12 @@ "destination_id": { "description": "The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "user_name": { "description": "The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. 
You cannot set both destination_id and user_name for subscription notifications.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7267,16 +7267,16 @@ "custom_subject": { "description": "Optional: Allows users to specify a custom subject line on the email sent\nto subscribers.", "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "paused": { "description": "When true, the subscription will not send emails.", "$ref": "#/$defs/bool", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "subscribers": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SubscriptionSubscriber", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -7294,11 +7294,11 @@ "properties": { "destination_id": { "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "user_name": { "$ref": "#/$defs/string", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" } }, "additionalProperties": false @@ -7317,22 +7317,22 @@ "condition": { "description": "The table(s) condition based on which to trigger a job run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "min_time_between_triggers_seconds": { "description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table_names": { "description": "A list of tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "wait_after_last_change_seconds": { "description": "If set, the trigger starts a run only after no table updates have occurred for the specified time\nand can be used to wait for a series of table updates before triggering a run. 
The\nminimum allowed value is 60 seconds.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7354,17 +7354,17 @@ "clean_rooms_notebook_task": { "description": "The task runs a [clean rooms](https://docs.databricks.com/clean-rooms/index.html) notebook\nwhen the `clean_rooms_notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CleanRoomsNotebookTask", - "since_version": "v0.237.0" + "sinceVersion": "v0.237.0" }, "condition_task": { "description": "The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present.\nThe condition task does not require a cluster to execute and does not support retries or notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "dashboard_task": { "description": "The task refreshes a dashboard and sends a snapshot to subscribers.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DashboardTask", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "dbt_cloud_task": { "description": "Task type for dbt cloud, deprecated in favor of the new name dbt_platform_task", @@ -7372,173 +7372,173 @@ "x-databricks-preview": "PRIVATE", "deprecationMessage": "This field is deprecated", "doNotSuggest": true, - "since_version": "v0.256.0", + "sinceVersion": "v0.256.0", "deprecated": true }, "dbt_platform_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtPlatformTask", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" }, "dbt_task": { "description": "The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "depends_on": { "description": "An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true.\nThe key is `task_key`, and the value is the name assigned to the dependent task.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "description": { "description": "An optional description for this task.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "disable_auto_optimization": { "description": "An option to disable auto optimization in serverless", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "disabled": { "description": "An optional flag to disable the task. If set to true, the task will not run even if it is part of a job.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" }, "email_notifications": { "description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. 
The default behavior is to not send any emails.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "environment_key": { "description": "The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "existing_cluster_id": { "description": "If existing_cluster_id, the ID of an existing cluster that is used for all runs.\nWhen running jobs or tasks on an existing cluster, you may need to manually restart\nthe cluster if it stops responding. We suggest running jobs and tasks on new clusters for\ngreater reliability", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "for_each_task": { "description": "The task executes a nested task for every input provided when the `for_each_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gen_ai_compute_task": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GenAiComputeTask", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" }, "health": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "job_cluster_key": { "description": "If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "libraries": { "description": "An optional list of libraries to be installed on the cluster.\nThe default value is an empty list.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "max_retries": { "description": "An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "min_retry_interval_millis": { "description": "An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. 
The default behavior is that unsuccessful runs are immediately retried.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "new_cluster": { "description": "If new_cluster, a description of a new cluster that is created for each run.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notebook_task": { "description": "The task runs a notebook when the `notebook_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notification_settings": { "description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "pipeline_task": { "description": "The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "power_bi_task": { "description": "The task triggers a Power BI semantic model update when the `power_bi_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PowerBiTask", - "since_version": "v0.248.0" + "sinceVersion": "v0.248.0" }, "python_wheel_task": { "description": "The task runs a Python wheel when the `python_wheel_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "retry_on_timeout": { "description": "An optional policy to specify whether to retry a job when it times out. 
The default behavior\nis to not retry on timeout.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "run_if": { "description": "An optional value specifying the condition determining whether the task is run once its dependencies have been completed.\n\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: All dependencies have failed", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "run_job_task": { "description": "The task triggers another job when the `run_job_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_jar_task": { "description": "The task runs a JAR when the `spark_jar_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_python_task": { "description": "The task runs a Python file when the `spark_python_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_submit_task": { "description": "(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. Databricks recommends using the spark_jar_task instead; see [Spark Submit task for jobs](/jobs/spark-submit).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "sql_task": { "description": "The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "task_key": { "description": "A unique name for the task. This field is used to refer to this task from other tasks.\nThis field is required and must be unique within its parent job.\nOn Update or Reset, this field is used to reference the tasks to be updated or reset.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timeout_seconds": { "description": "An optional timeout applied to each run of this job task. A value of `0` means no timeout.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "webhook_notifications": { "description": "A collection of system notification IDs to notify when runs of this task begin or complete. The default behavior is to not send any system notifications.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7560,12 +7560,12 @@ "outcome": { "description": "Can only be specified on condition task dependencies. 
The outcome of the dependent task that must be met for this task to run.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "task_key": { "description": "The name of the task this task depends on.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7588,33 +7588,33 @@ "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "on_duration_warning_threshold_exceeded": { "description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_failure": { "description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_start": { "description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. 
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7633,17 +7633,17 @@ "alert_on_last_attempt": { "description": "If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "no_alert_for_canceled_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "no_alert_for_skipped_runs": { "description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7678,27 +7678,27 @@ "file_arrival": { "description": "File arrival trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "model": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ModelTriggerConfiguration", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "pause_status": { "description": "Whether this trigger is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "periodic": { "description": "Periodic trigger settings.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table_update": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7716,7 +7716,7 @@ "properties": { "id": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -7738,27 +7738,27 @@ "on_duration_warning_threshold_exceeded": { "description": "An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_failure": { "description": "An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_start": { "description": "An optional list of system notification IDs to call when the run starts. 
A maximum of 3 destinations can be specified for the `on_start` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_streaming_backlog_exceeded": { "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7778,12 +7778,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7803,12 +7803,12 @@ "key": { "description": "The tag key.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "value": { "description": "The tag value.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7829,7 +7829,7 @@ "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -7847,11 +7847,11 @@ "properties": { "quartz_cron_schedule": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "timezone_id": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7907,17 +7907,17 @@ "catalog": { "description": "The UC catalog the event log is published under.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "name": { "description": "The name the event log is published to in UC.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "schema": { "description": "The UC schema the event log is published under.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" } }, "additionalProperties": false @@ -7936,7 +7936,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -7955,12 +7955,12 @@ "exclude": { "description": "Paths to exclude.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "include": { "description": "Paths to include.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, 
"additionalProperties": false @@ -7979,17 +7979,17 @@ "report": { "description": "Select a specific source report.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "schema": { "description": "Select all tables from a specific source schema.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table": { "description": "Select a specific source table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8009,35 +8009,35 @@ "description": "[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true }, "connection_name": { "description": "Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.", "$ref": "#/$defs/string", - "since_version": "v0.234.0" + "sinceVersion": "v0.234.0" }, "connection_parameters": { "description": "Optional, Internal. Parameters required to establish an initial connection with the source.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ConnectionParameters", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "gateway_storage_catalog": { "description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gateway_storage_name": { "description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nSpark Declarative Pipelines system will automatically create the storage location under the catalog and schema.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gateway_storage_schema": { "description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -8061,40 +8061,40 @@ "connection_name": { "description": "Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "ingest_from_uc_foreign_catalog": { "description": "Immutable. If set to true, the pipeline will ingest tables from the\nUC foreign catalogs directly without the need to specify a UC connection or ingestion gateway.\nThe `source_catalog` fields in objects of IngestionConfig are interpreted as\nthe UC foreign catalogs to ingest from.", "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "ingestion_gateway_id": { "description": "Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. 
This is used with connectors to databases like SQL Server.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "netsuite_jar_path": { "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" }, "objects": { "description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_configurations": { "description": "Top-level source configurations", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.SourceConfig", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8116,21 +8116,21 @@ "$ref": "#/$defs/slice/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" }, "deletion_condition": { "description": "Specifies a SQL WHERE condition that specifies that the source row has been deleted.\nThis is sometimes referred to as \"soft-deletes\".\nFor example: \"Operation = 'DELETE'\" or \"is_deleted = true\".\nThis field is orthogonal to `hard_deletion_sync_interval_in_seconds`,\none for soft-deletes and the other for hard-deletes.\nSee also the hard_deletion_sync_min_interval_in_seconds field for\nhandling of \"hard deletes\" where the source rows are physically removed from the table.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" }, "hard_deletion_sync_min_interval_in_seconds": { "description": "Specifies the minimum interval (in seconds) between snapshots on primary keys\nfor detecting and synchronizing hard deletions—i.e., rows that have been\nphysically removed from the source table.\nThis interval acts as a lower bound. If ingestion runs less frequently than\nthis value, hard deletion synchronization will align with the actual ingestion\nfrequency instead of happening more often.\nIf not set, hard deletion synchronization via snapshots is disabled.\nThis field is mutable and can be updated without triggering a full snapshot.", "$ref": "#/$defs/int64", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" } }, "additionalProperties": false @@ -8150,19 +8150,19 @@ "description": "(Optional) Marks the report as incremental.\nThis field is deprecated and should not be used. Use `parameters` instead. The incremental behavior is now\ncontrolled by the `parameters` field.", "$ref": "#/$defs/bool", "deprecationMessage": "This field is deprecated", - "since_version": "v0.271.0", + "sinceVersion": "v0.271.0", "deprecated": true }, "parameters": { "description": "Parameters for the Workday report. 
Each key represents the parameter name (e.g., \"start_date\", \"end_date\"),\nand the corresponding value is a SQL-like expression used to compute the parameter value at runtime.\nExample:\n{\n\"start_date\": \"{ coalesce(current_offset(), date(\\\"2025-02-01\\\")) }\",\n\"end_date\": \"{ current_date() - INTERVAL 1 DAY }\"\n}", "$ref": "#/$defs/map/string", - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" }, "report_parameters": { "description": "(Optional) Additional custom parameters for Workday Report\nThis field is deprecated and should not be used. Use `parameters` instead.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParametersQueryKeyValue", "deprecationMessage": "This field is deprecated", - "since_version": "v0.271.0", + "sinceVersion": "v0.271.0", "deprecated": true } }, @@ -8182,12 +8182,12 @@ "key": { "description": "Key for the report parameter, can be a column name or other metadata", "$ref": "#/$defs/string", - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" }, "value": { "description": "Value for the report parameter.\nPossible values it can take are these sql functions:\n1. coalesce(current_offset(), date(\"YYYY-MM-DD\")) -\u003e if current_offset() is null, then the passed date, else current_offset()\n2. current_date()\n3. date_sub(current_date(), x) -\u003e subtract x (some non-negative integer) days from current date", "$ref": "#/$defs/string", - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" } }, "additionalProperties": false @@ -8246,7 +8246,7 @@ "path": { "description": "The absolute path of the source code.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8265,12 +8265,12 @@ "alerts": { "description": "A list of alerts that trigger the sending of notifications to the configured\ndestinations. The supported alerts are:\n\n* `on-update-success`: A pipeline update completes successfully.\n* `on-update-failure`: Each time a pipeline update fails.\n* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.\n* `on-flow-failure`: A single data flow fails.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "email_recipients": { "description": "A list of email addresses notified when a configured alert is triggered.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8289,7 +8289,7 @@ "include": { "description": "The source code to include for pipelines", "$ref": "#/$defs/string", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" } }, "additionalProperties": false @@ -8308,97 +8308,97 @@ "apply_policy_default_values": { "description": "Note: This field won't be persisted. 
Only API users will check this field.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "autoscale": { "description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_attributes": { "description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "azure_attributes": { "description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cluster_log_conf": { "description": "The configuration for delivering spark logs to a long-term storage destination.\nOnly dbfs destinations are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "custom_tags": { "description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "driver_instance_pool_id": { "description": "The optional ID of the instance pool to which the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "driver_node_type_id": { "description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "enable_local_disk_encryption": { "description": "Whether to enable local disk encryption for the cluster.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "gcp_attributes": { "description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "init_scripts": { "description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. 
If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_pool_id": { "description": "The optional ID of the instance pool to which the cluster belongs.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "label": { "description": "A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "node_type_id": { "description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "num_workers": { "description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "policy_id": { "description": "The ID of the cluster policy used to create the cluster if applicable.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_conf": { "description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nSee :method:clusters/create for more details.", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "spark_env_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "ssh_public_keys": { "description": "SSH public key contents that will be added to each Spark node in this cluster. 
The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8417,17 +8417,17 @@ "max_workers": { "description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "min_workers": { "description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "mode": { "description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -8466,12 +8466,12 @@ "kind": { "description": "The deployment method that manages the pipeline.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "metadata_file_path": { "description": "The path to the file containing metadata about the deployment.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -8493,37 +8493,37 @@ "file": { "description": "The path to a file that defines a pipeline and is stored in the Databricks Repos.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "glob": { "description": "The unified field to include source codes.\nEach entry can be a notebook path, a file path, or a folder path that ends `/**`.\nThis field cannot be used together with `notebook` or `file`.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PathPattern", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "jar": { "description": "URI of the jar to be installed. 
Currently only DBFS is supported.", "$ref": "#/$defs/string", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "maven": { "description": "Specification of a maven library to be installed.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "notebook": { "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "whl": { "description": "URI of the whl to be installed.", "$ref": "#/$defs/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.229.0", + "sinceVersion": "v0.229.0", "deprecated": true } }, @@ -8542,11 +8542,11 @@ "properties": { "cron": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "manual": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8566,7 +8566,7 @@ "dependencies": { "description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e", "$ref": "#/$defs/slice/string", - "since_version": "v0.257.0" + "sinceVersion": "v0.257.0" } }, "additionalProperties": false @@ -8586,7 +8586,7 @@ "slot_config": { "description": "Optional. The Postgres slot configuration to use for logical replication", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresSlotConfig", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" } }, "additionalProperties": false @@ -8606,12 +8606,12 @@ "publication_name": { "description": "The name of the publication to use for the Postgres source", "$ref": "#/$defs/string", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" }, "slot_name": { "description": "The name of the logical replication slot to use for the Postgres source", "$ref": "#/$defs/string", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" } }, "additionalProperties": false @@ -8630,27 +8630,27 @@ "destination_catalog": { "description": "Required. Destination catalog to store table.", "$ref": "#/$defs/string", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "destination_schema": { "description": "Required. Destination schema to store table.", "$ref": "#/$defs/string", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "destination_table": { "description": "Required. Destination table name. The pipeline fails if a table with that name already exists.", "$ref": "#/$defs/string", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "source_url": { "description": "Required. Report URL in the source system.", "$ref": "#/$defs/string", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. 
These settings override the table_configuration defined in the IngestionPipelineDefinition object.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" } }, "additionalProperties": false, @@ -8674,17 +8674,17 @@ "days_of_week": { "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.DayOfWeek", - "since_version": "v0.234.0" + "sinceVersion": "v0.234.0" }, "start_hour": { "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", "$ref": "#/$defs/int", - "since_version": "v0.234.0" + "sinceVersion": "v0.234.0" }, "time_zone_id": { "description": "Time zone id of restart window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.\nIf not specified, UTC will be used.", "$ref": "#/$defs/string", - "since_version": "v0.234.0" + "sinceVersion": "v0.234.0" } }, "additionalProperties": false, @@ -8707,12 +8707,12 @@ "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", "$ref": "#/$defs/string", - "since_version": "v0.241.0" + "sinceVersion": "v0.241.0" }, "user_name": { "description": "The email of an active workspace user. Users can only set this field to their own email.", "$ref": "#/$defs/string", - "since_version": "v0.241.0" + "sinceVersion": "v0.241.0" } }, "additionalProperties": false @@ -8731,27 +8731,27 @@ "destination_catalog": { "description": "Required. Destination catalog to store tables.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "destination_schema": { "description": "Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails If a table with the same name already exists.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_catalog": { "description": "The source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_schema": { "description": "Required. Schema name in the source database.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. 
These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -8776,12 +8776,12 @@ "postgres": { "description": "Postgres-specific catalog-level configuration parameters", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PostgresCatalogConfig", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" }, "source_catalog": { "description": "Source catalog name", "$ref": "#/$defs/string", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" } }, "additionalProperties": false @@ -8800,7 +8800,7 @@ "catalog": { "description": "Catalog-level source configuration parameters", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SourceCatalogConfig", - "since_version": "v0.267.0" + "sinceVersion": "v0.267.0" } }, "additionalProperties": false @@ -8819,37 +8819,37 @@ "destination_catalog": { "description": "Required. Destination catalog to store table.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "destination_schema": { "description": "Required. Destination schema to store table.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "destination_table": { "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_catalog": { "description": "Source catalog name. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_schema": { "description": "Schema name in the source database. Might be optional depending on the type of source.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "source_table": { "description": "Required. Table name in the source database.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table_configuration": { "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -8873,24 +8873,24 @@ "exclude_columns": { "description": "A list of column names to be excluded for the ingestion.\nWhen not specified, include_columns fully controls which columns are ingested.\nWhen specified, all other columns including future ones will be automatically included for ingestion.\nThis field is mutually exclusive with `include_columns`.", "$ref": "#/$defs/slice/string", - "since_version": "v0.251.0" + "sinceVersion": "v0.251.0" }, "include_columns": { "description": "A list of column names to be included for the ingestion.\nWhen not specified, all columns except ones in exclude_columns will be included. 
Future\ncolumns will be automatically included.\nWhen specified, all other future columns will be automatically excluded from ingestion.\nThis field is mutually exclusive with `exclude_columns`.", "$ref": "#/$defs/slice/string", - "since_version": "v0.251.0" + "sinceVersion": "v0.251.0" }, "primary_keys": { "description": "The primary key of the table used to apply changes.", "$ref": "#/$defs/slice/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "query_based_connector_config": { "description": "Configurations that are only applicable for query-based ingestion connectors.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" }, "row_filter": { "description": "(Optional, Immutable) The row filter condition to be applied to the table.\nIt must not contain the WHERE keyword, only the actual filter condition.\nIt must be in DBSQL format.", @@ -8903,25 +8903,25 @@ "$ref": "#/$defs/bool", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "scd_type": { "description": "The SCD type to use to ingest the table.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "sequence_by": { "description": "The column names specifying the logical order of events in the source data. Spark Declarative Pipelines uses this sequencing to handle change events that arrive out of order.", "$ref": "#/$defs/slice/string", - "since_version": "v0.231.0" + "sinceVersion": "v0.231.0" }, "workday_report_parameters": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinitionWorkdayReportParameters", "x-databricks-preview": "PRIVATE", "doNotSuggest": true, - "since_version": "v0.271.0" + "sinceVersion": "v0.271.0" } }, "additionalProperties": false @@ -8957,12 +8957,12 @@ "ai21labs_api_key": { "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "ai21labs_api_key_plaintext": { "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. 
You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -8981,27 +8981,27 @@ "fallback_config": { "description": "Configuration for traffic fallback which auto fallbacks to other served entities if the request to a served\nentity fails with certain error codes, to increase availability.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.FallbackConfig", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "guardrails": { "description": "Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "inference_table_config": { "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "rate_limits": { "description": "Configuration for rate limits which can be set to limit endpoint traffic.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "usage_tracking_config": { "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" } }, "additionalProperties": false @@ -9021,24 +9021,24 @@ "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.230.0", + "sinceVersion": "v0.230.0", "deprecated": true }, "pii": { "description": "Configuration for guardrail PII filter.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "safety": { "description": "Indicates whether the safety filter is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "valid_topics": { "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.", "$ref": "#/$defs/slice/string", "deprecationMessage": "This field is deprecated", - "since_version": "v0.230.0", + "sinceVersion": "v0.230.0", "deprecated": true } }, @@ -9058,7 +9058,7 @@ "behavior": { "description": "Configuration for input guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" } }, "additionalProperties": false @@ -9093,12 +9093,12 @@ "input": { "description": "Configuration for input guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", 
- "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "output": { "description": "Configuration for output guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" } }, "additionalProperties": false @@ -9117,22 +9117,22 @@ "catalog_name": { "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.", "$ref": "#/$defs/string", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "enabled": { "description": "Indicates whether the inference table is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "schema_name": { "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.", "$ref": "#/$defs/string", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.", "$ref": "#/$defs/string", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" } }, "additionalProperties": false @@ -9151,27 +9151,27 @@ "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "key": { "description": "Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "principal": { "description": "Principal field for a user, user group, or service principal to apply rate limiting to. Accepts a user email, group name, or service principal application ID.", "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "renewal_period": { "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" }, "tokens": { "description": "Used to specify how many tokens are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "since_version": "v0.265.0" + "sinceVersion": "v0.265.0" } }, "additionalProperties": false, @@ -9224,7 +9224,7 @@ "enabled": { "description": "Whether to enable usage tracking.", "$ref": "#/$defs/bool", - "since_version": "v0.230.0" + "sinceVersion": "v0.230.0" } }, "additionalProperties": false @@ -9243,37 +9243,37 @@ "aws_access_key_id": { "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. 
You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_access_key_id_plaintext": { "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_region": { "description": "The AWS region to use. Bedrock has to be enabled there.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_secret_access_key": { "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "aws_secret_access_key_plaintext": { "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "bedrock_provider": { "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_profile_arn": { "description": "ARN of the instance profile that the external model will use to access AWS resources.\nYou must authenticate using an instance profile or access keys.\nIf you prefer to authenticate using access keys, see `aws_access_key_id`,\n`aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.243.0" + "sinceVersion": "v0.243.0" } }, "additionalProperties": false, @@ -9313,12 +9313,12 @@ "anthropic_api_key": { "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "anthropic_api_key_plaintext": { "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. 
You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9337,17 +9337,17 @@ "key": { "description": "The name of the API key parameter used for authentication.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "value": { "description": "The Databricks secret key reference for an API Key.\nIf you prefer to paste your token directly, see `value_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "value_plaintext": { "description": "The API Key provided as a plaintext string. If you prefer to reference your\ntoken using Databricks Secrets, see `value`.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" } }, "additionalProperties": false, @@ -9369,22 +9369,22 @@ "catalog_name": { "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "enabled": { "description": "Indicates whether the inference table is enabled.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "schema_name": { "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "table_name_prefix": { "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9403,12 +9403,12 @@ "token": { "description": "The Databricks secret key reference for a token.\nIf you prefer to paste your token directly, see `token_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "token_plaintext": { "description": "The token provided as a plaintext string. If you prefer to reference your\ntoken using Databricks Secrets, see `token`.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" } }, "additionalProperties": false @@ -9427,17 +9427,17 @@ "cohere_api_base": { "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cohere_api_key": { "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cohere_api_key_plaintext": { "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. 
You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9457,17 +9457,17 @@ "api_key_auth": { "description": "This is a field to provide API key authentication for the custom provider API.\nYou can only specify one authentication method.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ApiKeyAuth", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "bearer_token_auth": { "description": "This is a field to provide bearer token authentication for the custom provider API.\nYou can only specify one authentication method.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.BearerTokenAuth", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "custom_provider_url": { "description": "This is a field to provide the URL of the custom provider API.", "$ref": "#/$defs/string", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" } }, "additionalProperties": false, @@ -9489,17 +9489,17 @@ "databricks_api_token": { "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "databricks_api_token_plaintext": { "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. 
You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "databricks_workspace_url": { "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9521,12 +9521,12 @@ "on_update_failure": { "description": "A list of email addresses to be notified when an endpoint fails to update its configuration or state.", "$ref": "#/$defs/slice/string", - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" }, "on_update_success": { "description": "A list of email addresses to be notified when an endpoint successfully updates its configuration or state.", "$ref": "#/$defs/slice/string", - "since_version": "v0.264.0" + "sinceVersion": "v0.264.0" } }, "additionalProperties": false @@ -9545,22 +9545,22 @@ "auto_capture_config": { "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "served_entities": { "description": "The list of served entities under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "served_models": { "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "traffic_config": { "description": "The traffic configuration associated with the serving endpoint config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9579,12 +9579,12 @@ "key": { "description": "Key field for a serving endpoint tag.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "value": { "description": "Optional value field for a serving endpoint tag.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9606,62 +9606,62 @@ "ai21labs_config": { "description": "AI21Labs Config. Only required if the provider is 'ai21labs'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "amazon_bedrock_config": { "description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "anthropic_config": { "description": "Anthropic Config. 
Only required if the provider is 'anthropic'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "cohere_config": { "description": "Cohere Config. Only required if the provider is 'cohere'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "custom_provider_config": { "description": "Custom Provider Config. Only required if the provider is 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CustomProviderConfig", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" }, "databricks_model_serving_config": { "description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "google_cloud_vertex_ai_config": { "description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "The name of the external model.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_config": { "description": "OpenAI Config. Only required if the provider is 'openai'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "palm_config": { "description": "PaLM Config. Only required if the provider is 'palm'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "provider": { "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "task": { "description": "The task type of the external model.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9707,7 +9707,7 @@ "enabled": { "description": "Whether to enable traffic fallback. When a served entity in the serving endpoint returns specific error\ncodes (e.g. 500), the request will automatically be round-robin attempted with other served entities in the same\nendpoint, following the order of served entity list, until a successful response is returned.\nIf all attempts fail, return the last response with the error code.", "$ref": "#/$defs/bool", - "since_version": "v0.246.0" + "sinceVersion": "v0.246.0" } }, "additionalProperties": false, @@ -9729,22 +9729,22 @@ "private_key": { "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. 
You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "private_key_plaintext": { "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "project_id": { "description": "This is the Google Cloud project id that the service account is\nassociated with.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "region": { "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9768,57 +9768,57 @@ "microsoft_entra_client_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "microsoft_entra_client_secret": { "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "microsoft_entra_client_secret_plaintext": { "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "microsoft_entra_tenant_id": { "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_api_base": { "description": "This is a field to provide a customized base URL for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. 
For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_api_key": { "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_api_key_plaintext": { "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_api_type": { "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_api_version": { "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_deployment_name": { "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "openai_organization": { "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9837,12 +9837,12 @@ "palm_api_key": { "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "palm_api_key_plaintext": { "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -9861,17 +9861,17 @@ "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", "$ref": "#/$defs/int64", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "key": { "description": "Key field for a serving endpoint rate limit. 
Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "renewal_period": { "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9922,17 +9922,17 @@ "properties": { "served_entity_name": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "served_model_name": { "description": "The name of the served model this route configures traffic for.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "traffic_percentage": { "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -9954,71 +9954,71 @@ "entity_name": { "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "entity_version": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "external_model": { "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. 
Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "provisioned_model_units": { "description": "The number of model units provisioned.", "$ref": "#/$defs/int64", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -10037,65 +10037,65 @@ "environment_vars": { "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. 
Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "instance_profile_arn": { "description": "ARN of the instance profile that the served entity uses to access AWS resources.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "max_provisioned_concurrency": { "description": "The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "max_provisioned_throughput": { "description": "The maximum tokens per second that the endpoint can scale up to.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "min_provisioned_concurrency": { "description": "The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.", "$ref": "#/$defs/int", - "since_version": "v0.256.0" + "sinceVersion": "v0.256.0" }, "min_provisioned_throughput": { "description": "The minimum tokens per second that the endpoint can scale down to.", "$ref": "#/$defs/int", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "model_name": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "model_version": { "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "name": { "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "provisioned_model_units": { "description": "The number of model units provisioned.", "$ref": "#/$defs/int64", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "scale_to_zero_enabled": { "description": "Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workload_size": { "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.", "$ref": "#/$defs/string", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workload_type": { "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. 
See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false, @@ -10157,7 +10157,7 @@ "routes": { "description": "The list of routes that define traffic to each served entity.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": false @@ -10230,27 +10230,27 @@ "comparison_operator": { "description": "Operator used for comparison in alert evaluation.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ComparisonOperator", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "empty_result_state": { "description": "Alert state if result is empty. Please avoid setting this field to be `UNKNOWN` because `UNKNOWN` state is planned to be deprecated.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertEvaluationState", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "notification": { "description": "User or Notification Destination to notify when alert is triggered.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Notification", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "source": { "description": "Source column from result to use to evaluate alert", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "threshold": { "description": "Threshold to use for alert evaluation, can be a column or a value.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Operand", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false, @@ -10273,16 +10273,16 @@ "notify_on_ok": { "description": "Whether to notify alert subscribers when alert returns back to normal.", "$ref": "#/$defs/bool", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "retrigger_seconds": { "description": "Number of seconds an alert waits after being triggered before it is allowed to send another notification.\nIf set to 0 or omitted, the alert will not send any further notifications after the first trigger.\nSetting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes.", "$ref": "#/$defs/int", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "subscriptions": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.AlertV2Subscription", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -10300,11 +10300,11 @@ "properties": { "column": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandColumn", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "value": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.AlertV2OperandValue", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -10322,15 +10322,15 @@ "properties": { "aggregation": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.Aggregation", - "since_version": "v0.279.0" + "sinceVersion": 
"v0.279.0" }, "display": { "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "name": { "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false, @@ -10351,15 +10351,15 @@ "properties": { "bool_value": { "$ref": "#/$defs/bool", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "double_value": { "$ref": "#/$defs/float64", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "string_value": { "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -10378,12 +10378,12 @@ "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "user_name": { "description": "The email of an active workspace user. Can only set this field to their own email.", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -10401,11 +10401,11 @@ "properties": { "destination_id": { "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "user_email": { "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false @@ -10424,11 +10424,11 @@ "properties": { "dbsql_version": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "name": { "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.ChannelName", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" } }, "additionalProperties": false @@ -10501,17 +10501,17 @@ "pause_status": { "description": "Indicate whether this schedule is paused or not.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/sql.SchedulePauseStatus", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "quartz_cron_schedule": { "description": "A cron expression using quartz syntax that specifies the schedule for this pipeline.\nShould use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" }, "timezone_id": { "description": "A Java timezone id. 
The schedule will be resolved using this timezone.\nThis will be combined with the quartz_cron_schedule to determine the schedule.\nSee https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.", "$ref": "#/$defs/string", - "since_version": "v0.279.0" + "sinceVersion": "v0.279.0" } }, "additionalProperties": false, @@ -10533,11 +10533,11 @@ "properties": { "key": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" }, "value": { "$ref": "#/$defs/string", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" } }, "additionalProperties": false @@ -10555,7 +10555,7 @@ "properties": { "custom_tags": { "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/sql.EndpointTagPair", - "since_version": "v0.260.0" + "sinceVersion": "v0.260.0" } }, "additionalProperties": false @@ -10607,12 +10607,12 @@ "dns_name": { "description": "The DNS of the KeyVault", "$ref": "#/$defs/string", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" }, "resource_id": { "description": "The resource id of the azure KeyVault that user wants to associate the scope with.", "$ref": "#/$defs/string", - "since_version": "v0.252.0" + "sinceVersion": "v0.252.0" } }, "additionalProperties": false, @@ -11860,85 +11860,85 @@ "description": "Defines the attributes to build an artifact", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact", "markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", "markdownDescription": "The bundle attributes when deploying to this target,", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "environments": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "deprecationMessage": "Deprecated: please use targets instead", - "since_version": "v0.243.0", + "sinceVersion": "v0.243.0", "deprecated": true }, "experimental": { "description": "Defines attributes for experimental features.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "include": { "description": "Specifies a list of path globs that contain configuration files to include within the bundle.", "$ref": "#/$defs/slice/string", "markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. 
See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "permissions": { "description": "Defines a permission for a specific entity.", "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission", "markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "presets": { "description": "Defines bundle deployment presets.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets", "markdownDescription": "Defines bundle deployment presets. See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "python": { "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python", - "since_version": "v0.275.0" + "sinceVersion": "v0.275.0" }, "resources": { "description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "run_as": { "description": "The identity to use when running Databricks Asset Bundles workflows.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "scripts": { "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Script", - "since_version": "v0.259.0" + "sinceVersion": "v0.259.0" }, "sync": { "description": "The files and file paths to include or exclude in the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync", "markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "targets": { "description": "Defines deployment targets for the bundle.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target", "markdownDescription": "Defines deployment targets for the bundle. 
See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "variables": { "description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" }, "workspace": { "description": "Defines the Databricks workspace for the bundle.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace", "markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace).", - "since_version": "v0.229.0" + "sinceVersion": "v0.229.0" } }, "additionalProperties": {} diff --git a/libs/jsonschema/extension.go b/libs/jsonschema/extension.go index 32f7955a05..86fbd4b954 100644 --- a/libs/jsonschema/extension.go +++ b/libs/jsonschema/extension.go @@ -67,5 +67,5 @@ type Extension struct { FieldBehaviors []string `json:"x-databricks-field-behaviors,omitempty"` // SinceVersion indicates which CLI version introduced this field. - SinceVersion string `json:"since_version,omitempty"` + SinceVersion string `json:"sinceVersion,omitempty"` }