Commit b0cff3b

Make spark_version field optional to work with defaults in policies
1 parent 1e3e8ce commit b0cff3b

2 files changed: +53 -1 lines changed

clusters/resource_cluster.go

+1 -1

@@ -356,7 +356,7 @@ func (ClusterSpec) CustomizeSchema(s *common.CustomizableSchema) *common.Customi
 	s.SchemaPath("cluster_log_conf", "dbfs", "destination").SetRequired()
 	s.SchemaPath("cluster_log_conf", "s3", "destination").SetRequired()
 	s.SchemaPath("cluster_log_conf", "volumes", "destination").SetRequired()
-	s.SchemaPath("spark_version").SetRequired()
+	s.SchemaPath("spark_version").SetOptional()
 	s.AddNewField("cluster_id", &schema.Schema{
 		Type:     schema.TypeString,
 		Computed: true,
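
With spark_version now optional on the cluster spec schema, a configuration can leave the field out and let a cluster policy supply it. A minimal Terraform sketch of that pattern on a standalone cluster (resource names and values are illustrative, not part of this commit; apply_policy_default_values is assumed to be the switch that pulls in the policy's fixed value, as it is in the job-based acceptance test added below):

data "databricks_node_type" "smallest" {
  local_disk = true
}

resource "databricks_cluster_policy" "fixed_spark_version" {
  name = "fixed-spark-version"
  definition = jsonencode({
    "spark_version" : { "type" : "fixed", "value" : "14.3.x-scala2.12" }
  })
}

resource "databricks_cluster" "shared" {
  cluster_name                = "shared-cluster"
  node_type_id                = data.databricks_node_type.smallest.id
  num_workers                 = 1
  policy_id                   = databricks_cluster_policy.fixed_spark_version.id
  apply_policy_default_values = true
  autotermination_minutes     = 20
  # spark_version is omitted on purpose: with this change the field is
  # optional, and the policy's fixed value is expected to fill it in.
}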

jobs/job_test.go

+52

@@ -437,3 +437,55 @@ func TestAccPeriodicTrigger(t *testing.T) {
 		}),
 	})
 }
+
+func TestAccJobClusterPolicySparkVersion(t *testing.T) {
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: `
+		data "databricks_current_user" "me" {}
+		data "databricks_spark_version" "latest" {}
+		data "databricks_node_type" "smallest" {
+			local_disk = true
+		}
+
+		resource "databricks_notebook" "this" {
+			path           = "${data.databricks_current_user.me.home}/Terraform{var.RANDOM}"
+			language       = "PYTHON"
+			content_base64 = base64encode(<<-EOT
+				# created from ${abspath(path.module)}
+				display(spark.range(10))
+				EOT
+			)
+		}
+
+		resource "databricks_cluster_policy" "this" {
+			name       = "test-policy-{var.RANDOM}"
+			definition = jsonencode({
+				"spark_version": {
+					"type": "fixed",
+					"value": "14.3.x-scala2.12"
+				}
+			})
+		}
+
+		resource "databricks_job" "this" {
+			name = "test-job-{var.RANDOM}"
+
+			job_cluster {
+				job_cluster_key = "test-cluster"
+				new_cluster {
+					policy_id                   = databricks_cluster_policy.this.id
+					apply_policy_default_values = true
+				}
+			}
+
+			task {
+				task_key        = "test-task"
+				job_cluster_key = "test-cluster"
+				notebook_task {
+					notebook_path = databricks_notebook.this.path
+				}
+			}
+		}
+		`,
+	})
+}
