From ccd0ca4f28cb73d6a20a291a29876b257dc74e80 Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Mon, 5 Feb 2024 09:15:02 +0100 Subject: [PATCH 1/4] Add `samber/lo` for useful functions --- go.mod | 1 + go.sum | 2 ++ 2 files changed, 3 insertions(+) diff --git a/go.mod b/go.mod index 75997d41..62d248a6 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/hashicorp/terraform-plugin-docs v0.16.0 github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.30.0 + github.com/samber/lo v1.39.0 ) require ( diff --git a/go.sum b/go.sum index 7f4eecfe..28b85128 100644 --- a/go.sum +++ b/go.sum @@ -168,6 +168,8 @@ github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBO github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= +github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= +github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= From 234024d49faac88fe33f9153d170c17927fcf26a Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Mon, 5 Feb 2024 09:15:27 +0100 Subject: [PATCH 2/4] Add job chaining capabilities --- pkg/data_sources/job.go | 55 +++++++++++- pkg/data_sources/job_acceptance_test.go | 5 ++ pkg/dbt_cloud/job.go | 62 ++++++++++---- pkg/resources/job.go | 90 +++++++++++++++++++- pkg/resources/job_acceptance_test.go | 107 ++++++++++++++++++++++++ pkg/utils/job.go | 65 ++++++++++++++ 6 
files changed, 361 insertions(+), 23 deletions(-) create mode 100644 pkg/utils/job.go diff --git a/pkg/data_sources/job.go b/pkg/data_sources/job.go index dc013330..f6fefbea 100644 --- a/pkg/data_sources/job.go +++ b/pkg/data_sources/job.go @@ -6,8 +6,10 @@ import ( "strconv" "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/samber/lo" ) var jobSchema = map[string]*schema.Schema{ @@ -71,6 +73,31 @@ var jobSchema = map[string]*schema.Schema{ Computed: true, Description: "Whether the CI job should be automatically triggered on draft PRs", }, + "job_completion_trigger_condition": &schema.Schema{ + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "job_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The ID of the job that would trigger this job after completion.", + }, + "project_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The ID of the project where the trigger job is running in.", + }, + "statuses": { + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Computed: true, + Description: "List of statuses to trigger the job on.", + }, + }, + }, + Description: "Which other job should trigger this job when it finishes, and on which conditions.", + }, } func DatasourceJob() *schema.Resource { @@ -80,7 +107,11 @@ func DatasourceJob() *schema.Resource { } } -func datasourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { +func datasourceJobRead( + ctx context.Context, + d *schema.ResourceData, + m interface{}, +) diag.Diagnostics { c := m.(*dbt_cloud.Client) var diags diag.Diagnostics @@ -132,6 +163,28 @@ func datasourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{ return diag.FromErr(err) } + if 
job.JobCompletionTrigger == nil { + if err := d.Set("job_completion_trigger_condition", nil); err != nil { + return diag.FromErr(err) + } + } else { + triggerCondition := job.JobCompletionTrigger.Condition + // we convert the statuses from ID to human-readable strings + statusesNames := lo.Map(triggerCondition.Statuses, func(status int, idx int) any { + return utils.JobCompletionTriggerConditionsMappingCodeHuman[status] + }) + triggerConditionMap := map[string]any{ + "job_id": triggerCondition.JobID, + "project_id": triggerCondition.ProjectID, + "statuses": statusesNames, + } + triggerConditionSet := utils.JobConditionMapToSet(triggerConditionMap) + + if err := d.Set("job_completion_trigger_condition", triggerConditionSet); err != nil { + return diag.FromErr(err) + } + } + d.SetId(jobId) return diags diff --git a/pkg/data_sources/job_acceptance_test.go b/pkg/data_sources/job_acceptance_test.go index a598ab45..23fa1620 100644 --- a/pkg/data_sources/job_acceptance_test.go +++ b/pkg/data_sources/job_acceptance_test.go @@ -21,6 +21,11 @@ func TestDbtCloudJobDataSource(t *testing.T) { resource.TestCheckResourceAttr("data.dbtcloud_job.test", "name", randomJobName), resource.TestCheckResourceAttr("data.dbtcloud_job.test", "timeout_seconds", "180"), resource.TestCheckResourceAttr("data.dbtcloud_job.test", "triggers_on_draft_pr", "false"), + resource.TestCheckResourceAttr( + "data.dbtcloud_job.test", + "job_completion_trigger_condition.#", + "0", + ), ) resource.ParallelTest(t, resource.TestCase{ diff --git a/pkg/dbt_cloud/job.go b/pkg/dbt_cloud/job.go index 60b29df0..10c91a08 100644 --- a/pkg/dbt_cloud/job.go +++ b/pkg/dbt_cloud/job.go @@ -47,25 +47,36 @@ type JobExecution struct { Timeout_Seconds int `json:"timeout_seconds"` } +type JobCompletionTrigger struct { + Condition JobCompletionTriggerCondition `json:"condition"` +} + +type JobCompletionTriggerCondition struct { + JobID int `json:"job_id"` + ProjectID int `json:"project_id"` + Statuses []int `json:"statuses"` +} 
+ type Job struct { - ID *int `json:"id"` - Account_Id int `json:"account_id"` - Project_Id int `json:"project_id"` - Environment_Id int `json:"environment_id"` - Name string `json:"name"` - Description string `json:"description"` - Execute_Steps []string `json:"execute_steps"` - Dbt_Version *string `json:"dbt_version"` - Triggers JobTrigger `json:"triggers"` - Settings JobSettings `json:"settings"` - State int `json:"state"` - Generate_Docs bool `json:"generate_docs"` - Schedule JobSchedule `json:"schedule"` - Run_Generate_Sources bool `json:"run_generate_sources"` - Deferring_Job_Id *int `json:"deferring_job_definition_id"` - DeferringEnvironmentId *int `json:"deferring_environment_id"` - Execution JobExecution `json:"execution"` - TriggersOnDraftPR bool `json:"triggers_on_draft_pr"` + ID *int `json:"id"` + Account_Id int `json:"account_id"` + Project_Id int `json:"project_id"` + Environment_Id int `json:"environment_id"` + Name string `json:"name"` + Description string `json:"description"` + Execute_Steps []string `json:"execute_steps"` + Dbt_Version *string `json:"dbt_version"` + Triggers JobTrigger `json:"triggers"` + Settings JobSettings `json:"settings"` + State int `json:"state"` + Generate_Docs bool `json:"generate_docs"` + Schedule JobSchedule `json:"schedule"` + Run_Generate_Sources bool `json:"run_generate_sources"` + Deferring_Job_Id *int `json:"deferring_job_definition_id"` + DeferringEnvironmentId *int `json:"deferring_environment_id"` + Execution JobExecution `json:"execution"` + TriggersOnDraftPR bool `json:"triggers_on_draft_pr"` + JobCompletionTrigger *JobCompletionTrigger `json:"job_completion_trigger_condition"` } func (c *Client) GetJob(jobID string) (*Job, error) { @@ -115,6 +126,7 @@ func (c *Client) CreateJob( selfDeferring bool, timeoutSeconds int, triggersOnDraftPR bool, + jobCompletionTriggerCondition map[string]any, ) (*Job, error) { state := STATE_ACTIVE if !isActive { @@ -176,6 +188,19 @@ func (c *Client) CreateJob( Timeout_Seconds: 
timeoutSeconds, } + jobCompletionTrigger := &JobCompletionTrigger{} + if len(jobCompletionTriggerCondition) == 0 { + jobCompletionTrigger = nil + } else { + jobCompletionTrigger = &JobCompletionTrigger{ + Condition: JobCompletionTriggerCondition{ + JobID: jobCompletionTriggerCondition["job_id"].(int), + ProjectID: jobCompletionTriggerCondition["project_id"].(int), + Statuses: jobCompletionTriggerCondition["statuses"].([]int), + }, + } + } + newJob := Job{ Account_Id: c.AccountID, Project_Id: projectId, @@ -191,6 +216,7 @@ func (c *Client) CreateJob( Run_Generate_Sources: runGenerateSources, Execution: jobExecution, TriggersOnDraftPR: triggersOnDraftPR, + JobCompletionTrigger: jobCompletionTrigger, } if dbtVersion != "" { newJob.Dbt_Version = &dbtVersion diff --git a/pkg/resources/job.go b/pkg/resources/job.go index 8545656b..73a7fc27 100644 --- a/pkg/resources/job.go +++ b/pkg/resources/job.go @@ -8,9 +8,11 @@ import ( "strings" "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/samber/lo" ) var ( @@ -61,7 +63,7 @@ var jobSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: true, - Description: "Flag for whether the job is marked active or deleted", + Description: "Flag for whether the job is marked active or deleted. To create/keep a job in a 'deactivated' state, check the `triggers` config.", }, "triggers": &schema.Schema{ Type: schema.TypeMap, @@ -71,7 +73,7 @@ var jobSchema = map[string]*schema.Schema{ Optional: false, Default: false, }, - Description: "Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`.
`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment.", + Description: "Flags for which types of triggers to use, the values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`.
`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment. To create a job in a 'deactivated' state, set all to `false`.", }, "num_threads": &schema.Schema{ Type: schema.TypeInt, @@ -95,7 +97,7 @@ var jobSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: false, - Description: "Flag for whether the job should run generate sources", + Description: "Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run.", }, "schedule_type": &schema.Schema{ Type: schema.TypeString, @@ -166,6 +168,33 @@ var jobSchema = map[string]*schema.Schema{ Default: false, Description: "Whether the CI job should be automatically triggered on draft PRs", }, + "job_completion_trigger_condition": &schema.Schema{ + Type: schema.TypeSet, + Optional: true, + // using a set or a list with 1 item is the way in the SDKv2 to define nested objects + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "job_id": { + Type: schema.TypeInt, + Required: true, + Description: "The ID of the job that would trigger this job after completion.", + }, + "project_id": { + Type: schema.TypeInt, + Required: true, + Description: "The ID of the project where the trigger job is running in.", + }, + "statuses": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "List of statuses to trigger the job on. 
Possible values are `success`, `error` and `canceled`.", + }, + }, + }, + Description: "Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining').", + }, } func ResourceJob() *schema.Resource { @@ -281,6 +310,27 @@ func resourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{}) return diag.FromErr(err) } + if job.JobCompletionTrigger == nil { + if err := d.Set("job_completion_trigger_condition", nil); err != nil { + return diag.FromErr(err) + } + } else { + triggerCondition := job.JobCompletionTrigger.Condition + statusesNames := lo.Map(triggerCondition.Statuses, func(status int, idx int) any { + return utils.JobCompletionTriggerConditionsMappingCodeHuman[status] + }) + triggerConditionMap := map[string]any{ + "job_id": triggerCondition.JobID, + "project_id": triggerCondition.ProjectID, + "statuses": statusesNames, + } + triggerConditionSet := utils.JobConditionMapToSet(triggerConditionMap) + + if err := d.Set("job_completion_trigger_condition", triggerConditionSet); err != nil { + return diag.FromErr(err) + } + } + return diags } @@ -317,6 +367,18 @@ func resourceJobCreate( timeoutSeconds := d.Get("timeout_seconds").(int) triggersOnDraftPR := d.Get("triggers_on_draft_pr").(bool) + var jobCompletionTrigger map[string]any + empty, completionJobID, completionProjectID, completionStatuses := utils.ExtractJobConditionSet( + d, + ) + if !empty { + jobCompletionTrigger = map[string]any{ + "job_id": completionJobID, + "project_id": completionProjectID, + "statuses": completionStatuses, + } + } + steps := []string{} for _, step := range executeSteps { steps = append(steps, step.(string)) @@ -353,6 +415,7 @@ func resourceJobCreate( selfDeferring, timeoutSeconds, triggersOnDraftPR, + jobCompletionTrigger, ) if err != nil { return diag.FromErr(err) @@ -393,7 +456,8 @@ func resourceJobUpdate( d.HasChange("deferring_environment_id") || d.HasChange("self_deferring") || d.HasChange("timeout_seconds") 
|| - d.HasChange("triggers_on_drat_pr") { + d.HasChange("triggers_on_draft_pr") || + d.HasChange("job_completion_trigger_condition") { job, err := c.GetJob(jobId) if err != nil { return diag.FromErr(err) } @@ -537,6 +601,24 @@ func resourceJobUpdate( triggersOnDraftPR := d.Get("triggers_on_draft_pr").(bool) job.TriggersOnDraftPR = triggersOnDraftPR } + if d.HasChange("job_completion_trigger_condition") { + + empty, completionJobID, completionProjectID, completionStatuses := utils.ExtractJobConditionSet( + d, + ) + if empty { + job.JobCompletionTrigger = nil + } else { + jobCondTrigger := dbt_cloud.JobCompletionTrigger{ + Condition: dbt_cloud.JobCompletionTriggerCondition{ + JobID: completionJobID, + ProjectID: completionProjectID, + Statuses: completionStatuses, + }, + } + job.JobCompletionTrigger = &jobCondTrigger + } + } _, err = c.UpdateJob(jobId, *job) if err != nil { diff --git a/pkg/resources/job_acceptance_test.go b/pkg/resources/job_acceptance_test.go index 3cf8d289..51aff216 100644 --- a/pkg/resources/job_acceptance_test.go +++ b/pkg/resources/job_acceptance_test.go @@ -16,7 +16,10 @@ func TestAccDbtCloudJobResource(t *testing.T) { jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) jobName2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + // for deferral jobName3 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + // for job chaining + jobName4 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) @@ -98,6 +101,42 @@ func TestAccDbtCloudJobResource(t *testing.T) { resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "generate_docs"), ), }, + // JOB CHAINING + { + Config: testAccDbtCloudJobResourceJobChaining( + jobName2, + projectName, + environmentName, + jobName4, + 
), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_4"), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.#", + "1", + ), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.job_id", + ), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.project_id", + ), + resource.TestCheckTypeSetElemAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.statuses.*", + "error", + ), + resource.TestCheckTypeSetElemAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.statuses.*", + "success", + ), + ), + }, // DEFERRING JOBS (depends on whether DBT_LEGACY_JOB_DEFERRAL is set, e.g. whether the new CI is set) { Config: configDeferral, @@ -222,6 +261,74 @@ resource "dbtcloud_job" "test_job" { `, projectName, environmentName, DBT_CLOUD_VERSION, environmentName, DBT_CLOUD_VERSION, jobName, DBT_CLOUD_VERSION) } +func testAccDbtCloudJobResourceJobChaining( + jobName, projectName, environmentName, jobName4 string, +) string { + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "development" +} + +resource "dbtcloud_environment" "test_job_environment_new" { + project_id = dbtcloud_project.test_job_project.id + name = "DEPL %s" + dbt_version = "%s" + type = "deployment" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + dbt_version = "%s" + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": true, + 
"custom_branch_only": false, + } + is_active = true + num_threads = 37 + target_name = "test" + run_generate_sources = true + generate_docs = true + schedule_type = "every_day" + schedule_hours = [9, 17] + timeout_seconds = 180 +} + +resource "dbtcloud_job" "test_job_4" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment.environment_id + execute_steps = [ + "dbt build +my_model" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": false, + "custom_branch_only": false, + } + job_completion_trigger_condition { + job_id = dbtcloud_job.test_job.id + project_id = dbtcloud_project.test_job_project.id + statuses = ["error", "success"] + } + } +`, projectName, environmentName, DBT_CLOUD_VERSION, environmentName, DBT_CLOUD_VERSION, jobName, DBT_CLOUD_VERSION, jobName4) +} + func testAccDbtCloudJobResourceDeferringConfig( jobName, jobName2, jobName3, projectName, environmentName string, deferring string, diff --git a/pkg/utils/job.go b/pkg/utils/job.go new file mode 100644 index 00000000..d5237d31 --- /dev/null +++ b/pkg/utils/job.go @@ -0,0 +1,65 @@ +package utils + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/samber/lo" +) + +var ( + JobCompletionTriggerConditionsMappingCodeHuman = map[int]any{ + 10: "success", + 20: "error", + 30: "canceled", + } +) + +var JobCompletionTriggerConditionsMappingHumanCode = lo.Invert( + JobCompletionTriggerConditionsMappingCodeHuman, +) + +var objectSchema = map[string]*schema.Schema{ + "job_id": { + Type: schema.TypeInt, + }, + "project_id": { + Type: schema.TypeInt, + }, + "statuses": { + // we use TypeList here, just for moving from Map to Set + // the resource parameter itself is a TypeSet so that duplicates are removed and order doesn't matter + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, +} + +func JobConditionMapToSet(item map[string]any) 
*schema.Set { + // The hash function helps identify unique items in the set + hashFunc := schema.HashResource(&schema.Resource{Schema: objectSchema}) + + // Create a slice of maps as required by schema.NewSet + items := []interface{}{item} + + return schema.NewSet(hashFunc, items) +} + +func ExtractJobConditionSet( + d *schema.ResourceData, +) (empty bool, jobID, projectID int, statuses []int) { + + if d.Get("job_completion_trigger_condition").(*schema.Set).Len() == 0 { + return true, 0, 0, []int{} + } else { + // this is a set but we only allow 1 item + jobCompletionTrigger := d.Get("job_completion_trigger_condition").(*schema.Set).List()[0].(map[string]any) + + jobCompletionStatuses := lo.Map( + jobCompletionTrigger["statuses"].(*schema.Set).List(), + func(status interface{}, idx int) int { + return JobCompletionTriggerConditionsMappingHumanCode[status.(string)] + }, + ) + return false, jobCompletionTrigger["job_id"].(int), jobCompletionTrigger["project_id"].(int), jobCompletionStatuses + } +} From 204acf8dde5654625641438c67343e04cf65ea23 Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Mon, 5 Feb 2024 09:15:44 +0100 Subject: [PATCH 3/4] Update docs for jobs --- docs/data-sources/job.md | 10 +++++ docs/resources/job.md | 50 +++++++++++++++++++-- examples/resources/dbtcloud_job/resource.tf | 29 +++++++++++- templates/resources/job.md.tmpl | 6 +++ 4 files changed, 91 insertions(+), 4 deletions(-) diff --git a/docs/data-sources/job.md b/docs/data-sources/job.md index abb1a7f4..6d14b664 100644 --- a/docs/data-sources/job.md +++ b/docs/data-sources/job.md @@ -27,8 +27,18 @@ description: |- - `description` (String) Long description for the job - `environment_id` (Number) ID of the environment the job is in - `id` (String) The ID of this resource. +- `job_completion_trigger_condition` (Set of Object) Which other job should trigger this job when it finishes, and on which conditions. 
(see [below for nested schema](#nestedatt--job_completion_trigger_condition)) - `name` (String) Given name for the job - `self_deferring` (Boolean) Whether this job defers on a previous run of itself (overrides value in deferring_job_id) - `timeout_seconds` (Number) Number of seconds before the job times out - `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, custom_branch_only - `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs + + +### Nested Schema for `job_completion_trigger_condition` + +Read-Only: + +- `job_id` (Number) +- `project_id` (Number) +- `statuses` (Set of String) diff --git a/docs/resources/job.md b/docs/resources/job.md index 2dd528f6..dc4a2cda 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -13,6 +13,13 @@ description: |- Those improvements include modifications to deferral which was historically set at the job level and will now be set at the environment level. Deferral can still be set to "self" by setting `self_deferring` to `true` but with the new approach, deferral to other runs need to be done with `deferring_environment_id` instead of `deferring_job_id`. + +~> As of beginning of February 2024, job chaining with `job_completion_trigger_condition` is in private beta and not available to all users. +
+
+This notice will be removed once the feature is generally available. + + ## Example Usage ```terraform @@ -71,6 +78,33 @@ resource "dbtcloud_job" "ci_job" { schedule_days = [0, 1, 2, 3, 4, 5, 6] schedule_type = "days_of_week" } + +# a job that is set to be triggered after another job finishes +# this is sometimes referred as 'job chaining' +resource "dbtcloud_job" "downstream_job" { + environment_id = dbtcloud_environment.project2_prod_environment.environment_id + execute_steps = [ + "dbt build -s +my_model" + ] + generate_docs = true + name = "Downstream job in project 2" + num_threads = 32 + project_id = dbtcloud_project.dbt_project2.id + run_generate_sources = true + triggers = { + "custom_branch_only" : false, + "github_webhook" : false, + "git_provider_webhook" : false, + "schedule" : false + } + schedule_days = [0, 1, 2, 3, 4, 5, 6] + schedule_type = "days_of_week" + job_completion_trigger_condition { + job_id = dbtcloud_job.daily_job.id + project_id = dbtcloud_project.dbt_project.id + statuses = ["success"] + } +} ``` @@ -82,7 +116,7 @@ resource "dbtcloud_job" "ci_job" { - `execute_steps` (List of String) List of commands to execute for the job - `name` (String) Job name - `project_id` (Number) Project ID to create the job in -- `triggers` (Map of Boolean) Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`.
`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment. +- `triggers` (Map of Boolean) Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`.
`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment. To create a job in a 'deactivated' state, set all to `false`. ### Optional @@ -91,9 +125,10 @@ resource "dbtcloud_job" "ci_job" { - `deferring_job_id` (Number) Job identifier that this job defers to (legacy deferring approach) - `description` (String) Description for the job - `generate_docs` (Boolean) Flag for whether the job should generate documentation -- `is_active` (Boolean) Flag for whether the job is marked active or deleted +- `is_active` (Boolean) Flag for whether the job is marked active or deleted. To create/keep a job in a 'deactivated' state, check the `triggers` config. +- `job_completion_trigger_condition` (Block Set, Max: 1) Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining'). (see [below for nested schema](#nestedblock--job_completion_trigger_condition)) - `num_threads` (Number) Number of threads to use in the job -- `run_generate_sources` (Boolean) Flag for whether the job should run generate sources +- `run_generate_sources` (Boolean) Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run. - `schedule_cron` (String) Custom cron expression for schedule - `schedule_days` (List of Number) List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule - `schedule_hours` (List of Number) List of hours to execute the job at if running on a schedule @@ -108,6 +143,15 @@ resource "dbtcloud_job" "ci_job" { - `id` (String) The ID of this resource. 
+ +### Nested Schema for `job_completion_trigger_condition` + +Required: + +- `job_id` (Number) The ID of the job that would trigger this job after completion. +- `project_id` (Number) The ID of the project where the trigger job is running in. +- `statuses` (Set of String) List of statuses to trigger the job on. Possible values are `success`, `error` and `canceled`. + ## Import Import is supported using the following syntax: diff --git a/examples/resources/dbtcloud_job/resource.tf b/examples/resources/dbtcloud_job/resource.tf index 0efa3738..c0f237f6 100644 --- a/examples/resources/dbtcloud_job/resource.tf +++ b/examples/resources/dbtcloud_job/resource.tf @@ -52,4 +52,31 @@ resource "dbtcloud_job" "ci_job" { # this is not going to be used when schedule is set to false schedule_days = [0, 1, 2, 3, 4, 5, 6] schedule_type = "days_of_week" -} \ No newline at end of file +} + +# a job that is set to be triggered after another job finishes +# this is sometimes referred as 'job chaining' +resource "dbtcloud_job" "downstream_job" { + environment_id = dbtcloud_environment.project2_prod_environment.environment_id + execute_steps = [ + "dbt build -s +my_model" + ] + generate_docs = true + name = "Downstream job in project 2" + num_threads = 32 + project_id = dbtcloud_project.dbt_project2.id + run_generate_sources = true + triggers = { + "custom_branch_only" : false, + "github_webhook" : false, + "git_provider_webhook" : false, + "schedule" : false + } + schedule_days = [0, 1, 2, 3, 4, 5, 6] + schedule_type = "days_of_week" + job_completion_trigger_condition { + job_id = dbtcloud_job.daily_job.id + project_id = dbtcloud_project.dbt_project.id + statuses = ["success"] + } +} diff --git a/templates/resources/job.md.tmpl b/templates/resources/job.md.tmpl index 79f7eb6c..e3539417 100644 --- a/templates/resources/job.md.tmpl +++ b/templates/resources/job.md.tmpl @@ -13,6 +13,12 @@ description: |- Those improvements include modifications to deferral which was historically set at the 
job level and will now be set at the environment level. Deferral can still be set to "self" by setting `self_deferring` to `true` but with the new approach, deferral to other runs need to be done with `deferring_environment_id` instead of `deferring_job_id`. +~> As of beginning of February 2024, job chaining with `job_completion_trigger_condition` is in private beta and not available to all users. +
+
+This notice will be removed once the feature is generally available. + + ## Example Usage {{ tffile (printf "%s%s%s" "examples/resources/" .Name "/resource.tf") }} From 49daa6c26fe130a7edd2c0d41ef941f087125fef Mon Sep 17 00:00:00 2001 From: Benoit Perigaud <8754100+b-per@users.noreply.github.com> Date: Mon, 5 Feb 2024 09:15:58 +0100 Subject: [PATCH 4/4] Changelog for new release --- CHANGELOG.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 66ed988d..af3d828e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,19 @@ All notable changes to this project will be documented in this file. -## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.19...HEAD) +## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.20...HEAD) +## [0.2.20](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.19...v0.2.20) -## [0.2.18](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.18...v0.2.19) ## Changes +- Add support for job chaining and `job_completion_trigger_condition` (feature is in closed Beta in dbt Cloud as of 5 FEB 2024) + +## Documentation + +- Improve docs for jobs + +## [0.2.19](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.18...v0.2.19) ## Changes