Slide 9
©2022 Databricks Inc. — All rights reserved
// jobSchema is the Terraform schema for the job resource: generated from
// JobSettings via reflection, then customized with provider-specific
// validation, defaults, and extra computed/virtual fields.
var jobSchema = common.StructToSchema(JobSettings{},
	func(m map[string]*schema.Schema) map[string]*schema.Schema {
		// Apply the shared job-settings customizations at the top level
		// and inside the nested task / job_cluster resources.
		jobSettingsSchema(&m, "")
		taskSchema := &m["task"].Elem.(*schema.Resource).Schema
		jobSettingsSchema(taskSchema, "task.0.")
		jobClusterSchema := &m["job_cluster"].Elem.(*schema.Resource).Schema
		jobSettingsSchema(jobClusterSchema, "job_cluster.0.")
		gitSourceSchema(m["git_source"].Elem.(*schema.Resource), "")

		// schedule.pause_status only accepts the two documented states.
		if p, err := common.SchemaPath(m, "schedule", "pause_status"); err == nil {
			p.ValidateFunc = validation.StringInSlice([]string{"PAUSED", "UNPAUSED"}, false)
		}

		// max_concurrent_runs defaults to 1 and must be at least 1.
		mcr := m["max_concurrent_runs"]
		mcr.Default = 1
		mcr.ValidateDiagFunc = validation.ToDiagFunc(validation.IntAtLeast(1))

		// url is filled in by the server after creation; always_running is
		// a provider-side flag, not part of the Jobs API payload.
		m["url"] = &schema.Schema{
			Type:     schema.TypeString,
			Computed: true,
		}
		m["always_running"] = &schema.Schema{
			Type:     schema.TypeBool,
			Optional: true,
			Default:  false,
		}
		return m
	})
// JobSettings contains the information for configuring a job on databricks.
// Field tags drive both the JSON wire format and the reflected Terraform
// schema (defaults, aliases, mutually-exclusive groups).
type JobSettings struct {
// Job name; the tf tag supplies "Untitled" as the schema default.
Name string `json:"name,omitempty" tf:"default:Untitled"`
// BEGIN Jobs API 2.0
// Cluster choice: exactly one of the cluster_type group is expected
// (reuse an existing cluster or define a new one).
ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
// Task payload: one of the task_type group selects what the job runs.
NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"`
SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"`
SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
// Libraries to install; exposed in the schema as repeated "library"
// blocks with set semantics (slice_set).
Libraries []libraries.Library `json:"libraries,omitempty" tf:"slice_set,alias:library"`
TimeoutSeconds int32 `json:"timeout_seconds,omitempty"`
MaxRetries int32 `json:"max_retries,omitempty"`
MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
// END Jobs API 2.0
// BEGIN Jobs API 2.1
// Multi-task jobs: exposed as repeated "task" blocks.
Tasks []JobTaskSettings `json:"tasks,omitempty" tf:"alias:task"`
// API format marker; computed by the server, not set by users.
Format string `json:"format,omitempty" tf:"computed"`
// Shared clusters referenced by tasks; exposed as "job_cluster" blocks.
JobClusters []JobCluster `json:"job_clusters,omitempty" tf:"alias:job_cluster"`
// END Jobs API 2.1
// BEGIN Jobs + Repo integration preview
GitSource *GitSource `json:"git_source,omitempty"`
// END Jobs + Repo integration preview
Schedule *CronSchedule `json:"schedule,omitempty"`
// NOTE(review): schema customization elsewhere defaults this to 1 and
// requires it to be >= 1.
MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"`
// suppress_diff: server-populated notification settings should not
// produce spurious plan diffs.
EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
Tags map[string]string `json:"tags,omitempty"`
}
Reflection is not a silver bullet, and reflected schemas frequently require hand-written customization.