From 94c179b31611be0bd4d7ac337129772150a64aed Mon Sep 17 00:00:00 2001 From: maastha <122359335+maastha@users.noreply.github.com> Date: Fri, 28 Apr 2023 08:50:35 +0100 Subject: [PATCH 01/13] fix: microsoft_teams_webhook_url keeps updating on every apply (#1148) --- .../data_source_mongodbatlas_third_party_integrations.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongodbatlas/data_source_mongodbatlas_third_party_integrations.go b/mongodbatlas/data_source_mongodbatlas_third_party_integrations.go index 4a368f368c..698d318b01 100644 --- a/mongodbatlas/data_source_mongodbatlas_third_party_integrations.go +++ b/mongodbatlas/data_source_mongodbatlas_third_party_integrations.go @@ -115,7 +115,7 @@ func integrationToSchema(d *schema.ResourceData, integration *matlas.ThirdPartyI "org_name": integration.OrgName, "url": integrationSchema.URL, "secret": integrationSchema.Secret, - "microsoft_teams_webhook_url": integration.MicrosoftTeamsWebhookURL, + "microsoft_teams_webhook_url": integrationSchema.MicrosoftTeamsWebhookURL, "user_name": integrationSchema.UserName, "password": integrationSchema.Password, "service_discovery": integration.ServiceDiscovery, From 6a1b0c15821267d77d5890c1650985906429afd1 Mon Sep 17 00:00:00 2001 From: Andrea Angiolillo Date: Wed, 3 May 2023 08:20:03 +0100 Subject: [PATCH 02/13] INTMDB-783: Point in Time Restore is not enabled when should_copy_oplogs is set to true, when copying backups to other regions (#1150) --- .../resource_mongodbatlas_cloud_backup_schedule.go | 1 + .../resource_mongodbatlas_cloud_backup_schedule_test.go | 8 +++++--- website/docs/d/cloud_backup_schedule.html.markdown | 1 - 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule.go b/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule.go index 014f1b0b87..e3523ce440 100644 --- a/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule.go +++ 
b/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule.go @@ -597,6 +597,7 @@ func expandCopySetting(tfMap map[string]interface{}) *matlas.CopySetting { Frequencies: expandStringList(tfMap["frequencies"].(*schema.Set).List()), RegionName: pointy.String(tfMap["region_name"].(string)), ReplicationSpecID: pointy.String(tfMap["replication_spec_id"].(string)), + ShouldCopyOplogs: pointy.Bool(tfMap["should_copy_oplogs"].(bool)), } return copySetting } diff --git a/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule_test.go b/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule_test.go index 868b83769e..28f6ec5a50 100644 --- a/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule_test.go +++ b/mongodbatlas/resource_mongodbatlas_cloud_backup_schedule_test.go @@ -237,7 +237,7 @@ func TestAccBackupRSCloudBackupSchedule_copySettings(t *testing.T) { Config: testAccMongoDBAtlasCloudBackupScheduleCopySettingsConfig(projectID, clusterName, &matlas.CloudProviderSnapshotBackupPolicy{ ReferenceHourOfDay: pointy.Int64(3), ReferenceMinuteOfHour: pointy.Int64(45), - RestoreWindowDays: pointy.Int64(4), + RestoreWindowDays: pointy.Int64(1), }), Check: resource.ComposeTestCheckFunc( testAccCheckMongoDBAtlasCloudBackupScheduleExists(resourceName), @@ -245,7 +245,7 @@ func TestAccBackupRSCloudBackupSchedule_copySettings(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "cluster_name", clusterName), resource.TestCheckResourceAttr(resourceName, "reference_hour_of_day", "3"), resource.TestCheckResourceAttr(resourceName, "reference_minute_of_hour", "45"), - resource.TestCheckResourceAttr(resourceName, "restore_window_days", "4"), + resource.TestCheckResourceAttr(resourceName, "restore_window_days", "1"), resource.TestCheckResourceAttr(resourceName, "policy_item_hourly.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_item_daily.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_item_weekly.#", "1"), @@ -264,6 +264,7 @@ func 
TestAccBackupRSCloudBackupSchedule_copySettings(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "policy_item_monthly.0.retention_value", "4"), resource.TestCheckResourceAttr(resourceName, "copy_settings.0.cloud_provider", "AWS"), resource.TestCheckResourceAttr(resourceName, "copy_settings.0.region_name", "US_EAST_1"), + resource.TestCheckResourceAttr(resourceName, "copy_settings.0.should_copy_oplogs", "true"), ), }, }, @@ -515,6 +516,7 @@ func testAccMongoDBAtlasCloudBackupScheduleCopySettingsConfig(projectID, cluster provider_region_name = "US_EAST_2" provider_instance_size_name = "M10" cloud_backup = true //enable cloud provider snapshots + pit_enabled = true // enable point in time restore. you cannot copy oplogs when pit is not enabled. } resource "mongodbatlas_cloud_backup_schedule" "schedule_test" { @@ -554,7 +556,7 @@ func testAccMongoDBAtlasCloudBackupScheduleCopySettingsConfig(projectID, cluster "ON_DEMAND"] region_name = "US_EAST_1" replication_spec_id = mongodbatlas_cluster.my_cluster.replication_specs.*.id[0] - should_copy_oplogs = false + should_copy_oplogs = true } } `, projectID, clusterName, *p.ReferenceHourOfDay, *p.ReferenceMinuteOfHour, *p.RestoreWindowDays) diff --git a/website/docs/d/cloud_backup_schedule.html.markdown b/website/docs/d/cloud_backup_schedule.html.markdown index 9a6bf1100d..dfc95d707c 100644 --- a/website/docs/d/cloud_backup_schedule.html.markdown +++ b/website/docs/d/cloud_backup_schedule.html.markdown @@ -100,7 +100,6 @@ In addition to all arguments above, the following attributes are exported: * `retention_value` - Value to associate with `retention_unit`. Monthly policy must have retention days of at least 31 days or 5 weeks or 1 month. Note that for less frequent policy items, Atlas requires that you specify a retention period greater than or equal to the retention period specified for more frequent policy items. 
For example: If the weekly policy item specifies a retention of two weeks, the montly retention policy must specify two weeks or greater. ### Snapshot Distribution -* * `cloud_provider` - Human-readable label that identifies the cloud provider that stores the snapshot copy. i.e. "AWS" "AZURE" "GCP" * `frequencies` - List that describes which types of snapshots to copy. i.e. "HOURLY" "DAILY" "WEEKLY" "MONTHLY" "ON_DEMAND" * `region_name` - Target region to copy snapshots belonging to replicationSpecId to. Please supply the 'Atlas Region' which can be found under https://www.mongodb.com/docs/atlas/reference/cloud-providers/ 'regions' link From 4f153208273e3a18c9eb305a9247d49f77f73301 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Fri, 19 May 2023 13:26:05 +0100 Subject: [PATCH 03/13] INTMDB-805: [Terraform] Create new TF Data Lake Pipelines Run data sources only (not resource) --- go.mod | 2 +- go.sum | 2 + ...ongodbatlas_data_lake_pipeline_run_test.go | 48 ++++++ .../data_source_mongodbatlas_data_lake_run.go | 160 ++++++++++++++++++ mongodbatlas/provider.go | 1 + mongodbatlas/provider_test.go | 10 ++ 6 files changed, 222 insertions(+), 1 deletion(-) create mode 100644 mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go create mode 100644 mongodbatlas/data_source_mongodbatlas_data_lake_run.go diff --git a/go.mod b/go.mod index d9c5ee81e0..2d1a357b79 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/mwielbut/pointy v1.1.0 github.com/spf13/cast v1.5.0 github.com/zclconf/go-cty v1.13.1 - go.mongodb.org/atlas v0.26.1-0.20230515144022-94881a2555b8 + go.mongodb.org/atlas v0.26.2-0.20230519092542-835012b2f90f go.mongodb.org/realm v0.1.0 golang.org/x/exp v0.0.0-20221208152030-732eee02a75a ) diff --git a/go.sum b/go.sum index 0abcd37793..6a8094fb7a 100644 --- a/go.sum +++ b/go.sum @@ -484,6 +484,8 @@ github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRK go.mongodb.org/atlas v0.12.0/go.mod 
h1:wVCnHcm/7/IfTjEB6K8K35PLG70yGz8BdkRwX0oK9/M= go.mongodb.org/atlas v0.26.1-0.20230515144022-94881a2555b8 h1:8yKkk+xu/h5Wx5wDTSRNzv/NGV9T6dQiMs+RQz+MFZA= go.mongodb.org/atlas v0.26.1-0.20230515144022-94881a2555b8/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg= +go.mongodb.org/atlas v0.26.2-0.20230519092542-835012b2f90f h1:y/jKdhJzii7Qt/q6umK924FVQa8KP5E3laQAqG44BsE= +go.mongodb.org/atlas v0.26.2-0.20230519092542-835012b2f90f/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg= go.mongodb.org/realm v0.1.0 h1:zJiXyLaZrznQ+Pz947ziSrDKUep39DO4SfA0Fzx8M4M= go.mongodb.org/realm v0.1.0/go.mod h1:4Vj6iy+Puo1TDERcoh4XZ+pjtwbOzPpzqy3Cwe8ZmDM= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go new file mode 100644 index 0000000000..ed42114a1e --- /dev/null +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go @@ -0,0 +1,48 @@ +package mongodbatlas + +import ( + "fmt" + "os" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) { + testCheckDataLakePipelineRun(t) + var ( + dataSourceName = "data.mongodbatlas_data_lake_pipeline_run.test" + projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") + pipelineName = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") + runID = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID") + + // projectID = "63f4d4a47baeac59406dc131" + // pipelineName = "sample_guides.planets" + // runID = "6467558d70fc1a140034adf0" + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProviderFactories: testAccProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttrSet(dataSourceName, "project_id"), + resource.TestCheckResourceAttr(dataSourceName, "name", pipelineName), + ), + }, + }, + }) +} + +func testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID string) string { + return fmt.Sprintf(` + +data "mongodbatlas_data_lake_pipeline_run" "test" { + project_id = %[1]q + name = %[2]q + pipeline_run_id = %[3]q +} + `, projectID, pipelineName, runID) +} diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_run.go new file mode 100644 index 0000000000..e463d9a3cc --- /dev/null +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_run.go @@ -0,0 +1,160 @@ +package mongodbatlas + +import ( + "context" + "fmt" + "net/http" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + matlas "go.mongodb.org/atlas/mongodbatlas" +) + +const errorDataLakePipelineRunRead = "error reading MongoDB Atlas DataLake Run (%s): %s" + +func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceMongoDBAtlasDataLakeRunRead, + Schema: map[string]*schema.Schema{ + "project_id": { + Type: schema.TypeString, + Required: true, + }, + "name": { + Type: schema.TypeString, + Required: true, + }, + "pipeline_run_id": { + Type: schema.TypeString, + Required: true, + }, + "id": { + Type: schema.TypeString, + Computed: true, + }, + "created_date": { + Type: schema.TypeString, + Computed: true, + }, + "last_updated_date": { + Type: schema.TypeString, + Computed: true, + }, + "state": { + Type: schema.TypeString, + Computed: true, + }, + "phase": { + Type: schema.TypeString, + Computed: true, + }, + "pipeline_id": { + Type: schema.TypeString, + Computed: true, + }, + "snapshot_id": { + Type: schema.TypeString, + Computed: true, + }, + "backup_frequency_type": { + Type: schema.TypeString, + Computed: true, + }, + 
"stats": { + Type: schema.TypeList, + Computed: true, + MaxItems: 0, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bytes_exported": { + Type: schema.TypeInt, + Computed: true, + }, + "num_docs": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + conn := meta.(*MongoDBClient).Atlas + projectID := d.Get("project_id").(string) + name := d.Get("name").(string) + pipelineRunID := d.Get("pipeline_run_id").(string) + + dataLakeRun, resp, err := conn.DataLakePipeline.GetRun(ctx, projectID, name, pipelineRunID) + if err != nil { + if resp != nil && resp.StatusCode == http.StatusNotFound { + d.SetId("") + return nil + } + + return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunRead, name, err)) + } + + if err := d.Set("id", dataLakeRun.ID); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "hostnames", name, err)) + } + + if err := d.Set("project_id", dataLakeRun.GroupID); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "state", name, err)) + } + + if err := d.Set("created_date", dataLakeRun.CreatedDate); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_databases", name, err)) + } + + if err := d.Set("last_updated_date", dataLakeRun.LastUpdatedDate); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_databases", name, err)) + } + + if err := d.Set("state", dataLakeRun.State); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_databases", name, err)) + } + + if err := d.Set("phase", dataLakeRun.Phase); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_databases", name, err)) + } + + if err := d.Set("pipeline_id", dataLakeRun.PipelineID); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) + } + + if err := 
d.Set("snapshot_id", dataLakeRun.SnapshotID); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) + } + + if err := d.Set("backup_frequency_type", dataLakeRun.BackupFrequencyType); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) + } + + if err := d.Set("stats", flattenDataLakePipelineRunStats(dataLakeRun.Stats)); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) + } + + d.SetId(encodeStateID(map[string]string{ + "project_id": projectID, + "name": name, + "pipeline_run_id": pipelineRunID, + })) + + return nil +} + +func flattenDataLakePipelineRunStats(datalakeRunStats *matlas.DataLakePipelineRunStats) []map[string]interface{} { + if datalakeRunStats == nil { + return nil + } + + maps := make([]map[string]interface{}, 1) + maps[0] = map[string]interface{}{ + "bytes_exported": datalakeRunStats.BytesExported, + "num_docs": datalakeRunStats.NumDocs, + } + return maps +} diff --git a/mongodbatlas/provider.go b/mongodbatlas/provider.go index 88a3cda1dc..a392307821 100644 --- a/mongodbatlas/provider.go +++ b/mongodbatlas/provider.go @@ -201,6 +201,7 @@ func getDataSourcesMap() map[string]*schema.Resource { "mongodbatlas_search_indexes": dataSourceMongoDBAtlasSearchIndexes(), "mongodbatlas_data_lake": dataSourceMongoDBAtlasDataLake(), "mongodbatlas_data_lakes": dataSourceMongoDBAtlasDataLakes(), + "mongodbatlas_data_lake_pipeline_run": dataSourceMongoDBAtlasDataLakePipelineRun(), "mongodbatlas_event_trigger": dataSourceMongoDBAtlasEventTrigger(), "mongodbatlas_event_triggers": dataSourceMongoDBAtlasEventTriggers(), "mongodbatlas_project_invitation": dataSourceMongoDBAtlasProjectInvitation(), diff --git a/mongodbatlas/provider_test.go b/mongodbatlas/provider_test.go index 429f459c19..6f912a2ca7 100644 --- a/mongodbatlas/provider_test.go +++ b/mongodbatlas/provider_test.go @@ -179,6 +179,16 @@ func SkipTestExtCred(tb testing.TB) { } } 
+func testCheckDataLakePipelineRun(tb testing.TB) { + if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") == "" { + tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME` must be set for Projects acceptance testing") + } + + if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID") == "" { + tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID` must be set for Projects acceptance testing") + } +} + func testCheckTeamsIds(tb testing.TB) { if os.Getenv("MONGODB_ATLAS_TEAMS_IDS") == "" { tb.Skip("`MONGODB_ATLAS_TEAMS_IDS` must be set for Projects acceptance testing") From d0af34897d45ac1f455b87dfb84e2b39c9b80cd9 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Fri, 19 May 2023 13:26:51 +0100 Subject: [PATCH 04/13] Create data_source_mongodbatlas_data_lakes2_run.go --- ...ata_source_mongodbatlas_data_lakes2_run.go | 134 ++++++++++++++++++ 1 file changed, 134 insertions(+) create mode 100644 mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go diff --git a/mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go b/mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go new file mode 100644 index 0000000000..b45e6a31a7 --- /dev/null +++ b/mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go @@ -0,0 +1,134 @@ +package mongodbatlas + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + matlas "go.mongodb.org/atlas/mongodbatlas" +) + +const errorDataLakePipelineRunList = "error reading MongoDB Atlas DataLake Runs (%s): %s" + +func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceMongoDBAtlasDataLakeRunsRead, + Schema: map[string]*schema.Schema{ + "project_id": { + Type: schema.TypeString, + Required: true, + }, + "name": { + Type: schema.TypeString, + Required: true, + }, + "results": { + Type: schema.TypeList, + Computed: true, + 
Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "pipeline_run_id": { + Type: schema.TypeString, + Required: true, + }, + "id": { + Type: schema.TypeString, + Computed: true, + }, + "created_date": { + Type: schema.TypeString, + Computed: true, + }, + "last_updated_date": { + Type: schema.TypeString, + Computed: true, + }, + "state": { + Type: schema.TypeString, + Computed: true, + }, + "phase": { + Type: schema.TypeString, + Computed: true, + }, + "pipeline_id": { + Type: schema.TypeString, + Computed: true, + }, + "snapshot_id": { + Type: schema.TypeString, + Computed: true, + }, + "backup_frequency_type": { + Type: schema.TypeString, + Computed: true, + }, + "stats": { + Type: schema.TypeList, + Computed: true, + MaxItems: 0, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bytes_exported": { + Type: schema.TypeInt, + Computed: true, + }, + "num_docs": { + Type: schema.TypeInt, + Computed: true, + }, + }, + }, + + }, + }, + }, + }, + }, + } +} + +func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + conn := meta.(*MongoDBClient).Atlas + projectID := d.Get("project_id").(string) + name := d.Get("name").(string) + + dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name) + return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, name, err)) + + if err := d.Set("results", flattenDataLakePipelineRunResult(dataLakeRuns.Results)); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "results", projectID, err)) + } + + d.SetId(resource.UniqueId()) + + return nil +} + + +func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePipelineRun) []map[string]interface{} { + var results []map[string]interface{} + + if len(datalakePipelineRuns) == 0 { + return results + } + + results = make([]map[string]interface{}, len(datalakePipelineRuns)) + + for k, run := range datalakePipelineRuns { + results[k] = 
map[string]interface{}{ + "id": run.ID, + "created_date": run.CreatedDate, + "last_updated_date": run.LastUpdatedDate, + "state": run.State, + "pipeline_id": run.PipelineID, + "snapshot_id": run.SnapshotID, + "backup_frequency_type": run.BackupFrequencyType, + "stats": run.Stats, + } + } + + return results +} From 6b6bf2b900cbc4a2328b74317c1cccf58f7b5513 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Fri, 19 May 2023 13:37:27 +0100 Subject: [PATCH 05/13] added data source --- ...ce_mongodbatlas_data_lake_pipeline_run.go} | 0 ...e_mongodbatlas_data_lake_pipeline_runs.go} | 26 +++++------ ...ngodbatlas_data_lake_pipeline_runs_test.go | 46 +++++++++++++++++++ mongodbatlas/provider.go | 1 + mongodbatlas/provider_test.go | 11 +++-- 5 files changed, 67 insertions(+), 17 deletions(-) rename mongodbatlas/{data_source_mongodbatlas_data_lake_run.go => data_source_mongodbatlas_data_lake_pipeline_run.go} (100%) rename mongodbatlas/{data_source_mongodbatlas_data_lakes2_run.go => data_source_mongodbatlas_data_lake_pipeline_runs.go} (82%) create mode 100644 mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go similarity index 100% rename from mongodbatlas/data_source_mongodbatlas_data_lake_run.go rename to mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go diff --git a/mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go similarity index 82% rename from mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go rename to mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go index b45e6a31a7..f3ea58d98c 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lakes2_run.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go @@ -5,7 +5,7 @@ import ( "fmt" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" matlas "go.mongodb.org/atlas/mongodbatlas" ) @@ -81,7 +81,6 @@ func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource { }, }, }, - }, }, }, @@ -96,18 +95,19 @@ func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.Resou name := d.Get("name").(string) dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name) - return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, name, err)) + if err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, projectID, err)) + } if err := d.Set("results", flattenDataLakePipelineRunResult(dataLakeRuns.Results)); err != nil { return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "results", projectID, err)) } - d.SetId(resource.UniqueId()) + d.SetId(id.UniqueId()) return nil } - func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePipelineRun) []map[string]interface{} { var results []map[string]interface{} @@ -119,14 +119,14 @@ func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePip for k, run := range datalakePipelineRuns { results[k] = map[string]interface{}{ - "id": run.ID, - "created_date": run.CreatedDate, - "last_updated_date": run.LastUpdatedDate, - "state": run.State, - "pipeline_id": run.PipelineID, - "snapshot_id": run.SnapshotID, - "backup_frequency_type": run.BackupFrequencyType, - "stats": run.Stats, + "id": run.ID, + "created_date": run.CreatedDate, + "last_updated_date": run.LastUpdatedDate, + "state": run.State, + "pipeline_id": run.PipelineID, + "snapshot_id": run.SnapshotID, + "backup_frequency_type": run.BackupFrequencyType, + "stats": flattenDataLakePipelineRunStats(run.Stats), } } diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go 
b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go new file mode 100644 index 0000000000..b7f7985d76 --- /dev/null +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go @@ -0,0 +1,46 @@ +package mongodbatlas + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccBackupDSDataLakePipelineRuns_basic(t *testing.T) { + // testCheckDataLakePipelineRuns(t) + var ( + dataSourceName = "data.mongodbatlas_data_lake_pipeline_runs.test" + // projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") + // pipelineName = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") + + projectID = "63f4d4a47baeac59406dc131" + pipelineName = "sample_guides.planets" + // runID = "6467558d70fc1a140034adf0" + ) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProviderFactories: testAccProviderFactories, + Steps: []resource.TestStep{ + { + Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipelineName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(dataSourceName, "project_id"), + resource.TestCheckResourceAttr(dataSourceName, "name", pipelineName), + resource.TestCheckResourceAttrSet(dataSourceName, "results.#"), + ), + }, + }, + }) +} + +func testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipelineName string) string { + return fmt.Sprintf(` + +data "mongodbatlas_data_lake_pipeline_runs" "test" { + project_id = %[1]q + name = %[2]q +} + `, projectID, pipelineName) +} diff --git a/mongodbatlas/provider.go b/mongodbatlas/provider.go index a392307821..16fcaa06f2 100644 --- a/mongodbatlas/provider.go +++ b/mongodbatlas/provider.go @@ -202,6 +202,7 @@ func getDataSourcesMap() map[string]*schema.Resource { "mongodbatlas_data_lake": dataSourceMongoDBAtlasDataLake(), "mongodbatlas_data_lakes": dataSourceMongoDBAtlasDataLakes(), "mongodbatlas_data_lake_pipeline_run": 
dataSourceMongoDBAtlasDataLakePipelineRun(), + "mongodbatlas_data_lake_pipeline_runs": dataSourceMongoDBAtlasDataLakePipelineRuns(), "mongodbatlas_event_trigger": dataSourceMongoDBAtlasEventTrigger(), "mongodbatlas_event_triggers": dataSourceMongoDBAtlasEventTriggers(), "mongodbatlas_project_invitation": dataSourceMongoDBAtlasProjectInvitation(), diff --git a/mongodbatlas/provider_test.go b/mongodbatlas/provider_test.go index 6f912a2ca7..0335be2f92 100644 --- a/mongodbatlas/provider_test.go +++ b/mongodbatlas/provider_test.go @@ -180,13 +180,16 @@ func SkipTestExtCred(tb testing.TB) { } func testCheckDataLakePipelineRun(tb testing.TB) { - if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") == "" { - tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME` must be set for Projects acceptance testing") - } - if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID") == "" { tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID` must be set for Projects acceptance testing") } + testCheckDataLakePipelineRuns(tb) +} + +func testCheckDataLakePipelineRuns(tb testing.TB) { + if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") == "" { + tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME` must be set for Projects acceptance testing") + } } func testCheckTeamsIds(tb testing.TB) { From 074db58faf7907a3f5e3e0a8ca1119e6b2db749f Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Fri, 19 May 2023 14:00:48 +0100 Subject: [PATCH 06/13] Added documentation --- ...rce_mongodbatlas_data_lake_pipeline_run.go | 8 +++ ...ongodbatlas_data_lake_pipeline_run_test.go | 6 +-- ...ngodbatlas_data_lake_pipeline_runs_test.go | 11 ++--- .../d/data_lake_pipeline_run.html.markdown | 49 +++++++++++++++++++ .../d/data_lake_pipeline_runs.html.markdown | 49 +++++++++++++++++++ 5 files changed, 111 insertions(+), 12 deletions(-) create mode 100644 website/docs/d/data_lake_pipeline_run.html.markdown create mode 100644 website/docs/d/data_lake_pipeline_runs.html.markdown diff --git 
a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go index e463d9a3cc..70b58d3c40 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go @@ -44,6 +44,10 @@ func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource { Type: schema.TypeString, Computed: true, }, + "dataset_name": { + Type: schema.TypeString, + Computed: true, + }, "phase": { Type: schema.TypeString, Computed: true, @@ -125,6 +129,10 @@ func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.Resour return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) } + if err := d.Set("dataset_name", dataLakeRun.DatasetName); err != nil { + return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) + } + if err := d.Set("snapshot_id", dataLakeRun.SnapshotID); err != nil { return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "storage_stores", name, err)) } diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go index ed42114a1e..975df8b8a1 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go @@ -9,16 +9,12 @@ import ( ) func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) { - testCheckDataLakePipelineRun(t) + // testCheckDataLakePipelineRun(t) var ( dataSourceName = "data.mongodbatlas_data_lake_pipeline_run.test" projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") pipelineName = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") runID = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID") - - // projectID = "63f4d4a47baeac59406dc131" - // pipelineName = "sample_guides.planets" - // runID = "6467558d70fc1a140034adf0" ) resource.ParallelTest(t, resource.TestCase{ diff 
--git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go index b7f7985d76..8dd44ed223 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go @@ -2,21 +2,18 @@ package mongodbatlas import ( "fmt" + "os" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) func TestAccBackupDSDataLakePipelineRuns_basic(t *testing.T) { - // testCheckDataLakePipelineRuns(t) + testCheckDataLakePipelineRuns(t) var ( dataSourceName = "data.mongodbatlas_data_lake_pipeline_runs.test" - // projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") - // pipelineName = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") - - projectID = "63f4d4a47baeac59406dc131" - pipelineName = "sample_guides.planets" - // runID = "6467558d70fc1a140034adf0" + projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") + pipelineName = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") ) resource.ParallelTest(t, resource.TestCase{ diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown new file mode 100644 index 0000000000..9263692b16 --- /dev/null +++ b/website/docs/d/data_lake_pipeline_run.html.markdown @@ -0,0 +1,49 @@ +--- +layout: "mongodbatlas" +page_title: "MongoDB Atlas: data_lake_pipeline_run" +sidebar_current: "docs-mongodbatlas-datasource-data-lake-pipeline-run" +description: |- + Describes a Data Lake Pipeline Run. +--- + +# Data Source: mongodbatlas_data_lake_pipeline_run + +`mongodbatlas_data_lake_pipeline_run` describe a Data Lake Pipeline Run. + + +-> **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation. 
+ +## Example Usage + +```terraform +data "mongodbatlas_data_lake_pipeline_run" "test" { + project_id = "PROJECT ID" + name = "DATA LAKE PIPELINE NAME" + pipeline_run_id = "DATA LAKE PIPELINE RUN ID" +} +``` + +## Argument Reference + +* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. +* `name` - (Required) Human-readable label that identifies the Data Lake Pipeline. +* `pipeline_run_id` - (Required) Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. + +## Attributes Reference + +In addition to all arguments above, the following attributes are exported: + +* `id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. +* `created_date` - Timestamp that indicates when the pipeline run was created. +* `last_updated_date` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. +* `state` - State of the pipeline run. +* `dataset_name` - Human-readable label that identifies the dataset that Atlas generates during this pipeline run. +* `phase` - Processing phase of the Data Lake Pipeline. +* `pipeline_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline. +* `snapshot_id` - Unique 24-hexadecimal character string that identifies the snapshot of a cluster. +* `backup_frequency_type` - Backup schedule interval of the Data Lake Pipeline. +* `stats` - Runtime statistics for this Data Lake Pipeline run. + * `bytes_exported` - Total data size in bytes exported for this pipeline run. + * `num_docs` - Number of docs ingested for a this pipeline run. + +See [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Data-Lake-Pipelines/operation/getPipelineRun) Documentation for more information. 
\ No newline at end of file diff --git a/website/docs/d/data_lake_pipeline_runs.html.markdown b/website/docs/d/data_lake_pipeline_runs.html.markdown new file mode 100644 index 0000000000..db86676f74 --- /dev/null +++ b/website/docs/d/data_lake_pipeline_runs.html.markdown @@ -0,0 +1,49 @@ +--- +layout: "mongodbatlas" +page_title: "MongoDB Atlas: data_lake_pipeline_runs" +sidebar_current: "docs-mongodbatlas-datasource-data-lake-pipeline-runs" +description: |- + Describes Data Lake Pipeline Runs. +--- + +# Data Source: mongodbatlas_data_lake_pipeline_runs + +`mongodbatlas_data_lake_pipeline_runs` describes Data Lake Pipeline Runs. + + +-> **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation. + +## Example Usage + +```terraform +data "mongodbatlas_data_lake_pipeline_runs" "test" { + project_id = "PROJECT ID" + name = "DATA LAKE PIPELINE NAME" +} +``` + +## Argument Reference + +* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. +* `name` - (Required) Human-readable label that identifies the Data Lake Pipeline. + +## Attributes Reference +* `results` - A list where each element represents a Data Lake Pipeline Run. + +### Data Lake Pipeline Run + +* `id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. +* `pipeline_run_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. +* `created_date` - Timestamp that indicates when the pipeline run was created. +* `last_updated_date` - Timestamp that indicates when the pipeline run was last updated. +* `state` - State of the pipeline run. +* `dataset_name` - Human-readable label that identifies the dataset that Atlas generates during this pipeline run. +* `phase` - Processing phase of the Data Lake Pipeline. +* `pipeline_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline. 
+* `snapshot_id` - Unique 24-hexadecimal character string that identifies the snapshot of a cluster. +* `backup_frequency_type` - Backup schedule interval of the Data Lake Pipeline. +* `stats` - Runtime statistics for this Data Lake Pipeline run. + * `bytes_exported` - Total data size in bytes exported for this pipeline run. + * `num_docs` - Number of docs ingested for a this pipeline run. + +See [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Data-Lake-Pipelines/operation/listPipelineRuns) Documentation for more information. \ No newline at end of file From 63784f210b53ae558e6e0da9c2821ed3a2037261 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Fri, 19 May 2023 14:06:56 +0100 Subject: [PATCH 07/13] Update data_source_mongodbatlas_data_lake_pipeline_run_test.go --- .../data_source_mongodbatlas_data_lake_pipeline_run_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go index 975df8b8a1..8576153ae2 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go @@ -9,7 +9,7 @@ import ( ) func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) { - // testCheckDataLakePipelineRun(t) + testCheckDataLakePipelineRun(t) var ( dataSourceName = "data.mongodbatlas_data_lake_pipeline_run.test" projectID = os.Getenv("MONGODB_ATLAS_PROJECT_ID") From b81b1c2bb34c8acd55fa226221f86c84fe3e7210 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Tue, 23 May 2023 12:28:38 +0100 Subject: [PATCH 08/13] Addressed Zuhair's comments --- .../data_source_mongodbatlas_data_lake_pipeline_run.go | 6 +++--- .../data_source_mongodbatlas_data_lake_pipeline_run_test.go | 2 +- .../data_source_mongodbatlas_data_lake_pipeline_runs.go | 4 ++-- 
...data_source_mongodbatlas_data_lake_pipeline_runs_test.go | 2 +- website/docs/d/data_lake_pipeline_run.html.markdown | 4 ++-- website/docs/d/data_lake_pipeline_runs.html.markdown | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go index 70b58d3c40..66603e886c 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go @@ -20,7 +20,7 @@ func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource { Type: schema.TypeString, Required: true, }, - "name": { + "pipeline_name": { Type: schema.TypeString, Required: true, }, @@ -88,7 +88,7 @@ func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource { func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { conn := meta.(*MongoDBClient).Atlas projectID := d.Get("project_id").(string) - name := d.Get("name").(string) + name := d.Get("pipeline_name").(string) pipelineRunID := d.Get("pipeline_run_id").(string) dataLakeRun, resp, err := conn.DataLakePipeline.GetRun(ctx, projectID, name, pipelineRunID) @@ -147,7 +147,7 @@ func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.Resour d.SetId(encodeStateID(map[string]string{ "project_id": projectID, - "name": name, + "pipeline_name": name, "pipeline_run_id": pipelineRunID, })) diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go index 8576153ae2..d4a242c443 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go @@ -37,7 +37,7 @@ func testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineN data 
"mongodbatlas_data_lake_pipeline_run" "test" { project_id = %[1]q - name = %[2]q + pipeline_name = %[2]q pipeline_run_id = %[3]q } `, projectID, pipelineName, runID) diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go index f3ea58d98c..d8c0d715df 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go @@ -20,7 +20,7 @@ func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource { Type: schema.TypeString, Required: true, }, - "name": { + "pipeline_name": { Type: schema.TypeString, Required: true, }, @@ -92,7 +92,7 @@ func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource { func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { conn := meta.(*MongoDBClient).Atlas projectID := d.Get("project_id").(string) - name := d.Get("name").(string) + name := d.Get("pipeline_name").(string) dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name) if err != nil { diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go index 8dd44ed223..3cbf7efffe 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go @@ -37,7 +37,7 @@ func testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipeline data "mongodbatlas_data_lake_pipeline_runs" "test" { project_id = %[1]q - name = %[2]q + pipeline_name = %[2]q } `, projectID, pipelineName) } diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown index 9263692b16..1c10d086b5 100644 --- a/website/docs/d/data_lake_pipeline_run.html.markdown +++ 
b/website/docs/d/data_lake_pipeline_run.html.markdown @@ -18,7 +18,7 @@ description: |- ```terraform data "mongodbatlas_data_lake_pipeline_run" "test" { project_id = "PROJECT ID" - name = "DATA LAKE PIPELINE NAME" + pipeline_name = "DATA LAKE PIPELINE NAME" pipeline_run_id = "DATA LAKE PIPELINE RUN ID" } ``` @@ -26,7 +26,7 @@ data "mongodbatlas_data_lake_pipeline_run" "test" { ## Argument Reference * `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. -* `name` - (Required) Human-readable label that identifies the Data Lake Pipeline. +* `pipeline_name` - (Required) Human-readable label that identifies the Data Lake Pipeline. * `pipeline_run_id` - (Required) Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run. ## Attributes Reference diff --git a/website/docs/d/data_lake_pipeline_runs.html.markdown b/website/docs/d/data_lake_pipeline_runs.html.markdown index db86676f74..e343bb25ab 100644 --- a/website/docs/d/data_lake_pipeline_runs.html.markdown +++ b/website/docs/d/data_lake_pipeline_runs.html.markdown @@ -18,14 +18,14 @@ description: |- ```terraform data "mongodbatlas_data_lake_pipeline_runs" "test" { project_id = "PROJECT ID" - name = "DATA LAKE PIPELINE NAME" + pipeline_name = "DATA LAKE PIPELINE NAME" } ``` ## Argument Reference * `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project. -* `name` - (Required) Human-readable label that identifies the Data Lake Pipeline. +* `pipeline_name` - (Required) Human-readable label that identifies the Data Lake Pipeline. ## Attributes Reference * `results` - A list where each represents a Data Lake Pipeline Run. 
From 7da19aa645bb23840e4ca65ea88be6cf6708b971 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Tue, 23 May 2023 12:31:50 +0100 Subject: [PATCH 09/13] Addressed Melissa's comments --- .../d/data_lake_pipeline_run.html.markdown | 31 +++++++++++++++++-- .../d/data_lake_pipeline_runs.html.markdown | 29 ++++++++++++++++- 2 files changed, 57 insertions(+), 3 deletions(-) diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown index 1c10d086b5..5d57fee846 100644 --- a/website/docs/d/data_lake_pipeline_run.html.markdown +++ b/website/docs/d/data_lake_pipeline_run.html.markdown @@ -16,9 +16,36 @@ description: |- ## Example Usage ```terraform +resource "mongodbatlas_data_lake_pipeline" "pipeline" { + //assuming we've already setup project and cluster in another block + project_id = mongodbatlas_project.projectTest.project_id + name = "DataLakePipelineName" + sink { + type = "DLS" + partition_fields { + name = "access" + order = 0 + } + } + source { + type = "ON_DEMAND_CPS" + cluster_name = mongodbatlas_cluster.clusterTest.name + database_name = "sample_airbnb" + collection_name = "listingsAndReviews" + } + transformations { + field = "test" + type = "EXCLUDE" + } + transformations { + field = "test22" + type = "EXCLUDE" + } +} + data "mongodbatlas_data_lake_pipeline_run" "test" { - project_id = "PROJECT ID" - pipeline_name = "DATA LAKE PIPELINE NAME" + project_id = mongodbatlas_project.projectTest.project_id + name = mongodbatlas_data_lake_pipeline.pipeline.name pipeline_run_id = "DATA LAKE PIPELINE RUN ID" } ``` diff --git a/website/docs/d/data_lake_pipeline_runs.html.markdown b/website/docs/d/data_lake_pipeline_runs.html.markdown index e343bb25ab..60db9228b7 100644 --- a/website/docs/d/data_lake_pipeline_runs.html.markdown +++ b/website/docs/d/data_lake_pipeline_runs.html.markdown @@ -16,8 +16,35 @@ description: |- ## Example Usage ```terraform +resource "mongodbatlas_data_lake_pipeline" "pipeline" { + 
//assuming we've already setup project and cluster in another block + project_id = mongodbatlas_project.projectTest.project_id + name = "DataLakePipelineName" + sink { + type = "DLS" + partition_fields { + name = "access" + order = 0 + } + } + source { + type = "ON_DEMAND_CPS" + cluster_name = mongodbatlas_cluster.clusterTest.name + database_name = "sample_airbnb" + collection_name = "listingsAndReviews" + } + transformations { + field = "test" + type = "EXCLUDE" + } + transformations { + field = "test22" + type = "EXCLUDE" + } +} + data "mongodbatlas_data_lake_pipeline_runs" "test" { - project_id = "PROJECT ID" + project_id = mongodbatlas_project.projectTest.project_id pipeline_name = "DATA LAKE PIPELINE NAME" } ``` From 1de84aeff38f5515e9ae3fca6e85ee9673a0839f Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Tue, 23 May 2023 12:41:06 +0100 Subject: [PATCH 10/13] small fix to the example --- website/docs/d/data_lake_pipeline_run.html.markdown | 2 +- website/docs/d/data_lake_pipeline_runs.html.markdown | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown index 5d57fee846..730fda9ee8 100644 --- a/website/docs/d/data_lake_pipeline_run.html.markdown +++ b/website/docs/d/data_lake_pipeline_run.html.markdown @@ -45,7 +45,7 @@ resource "mongodbatlas_data_lake_pipeline" "pipeline" { data "mongodbatlas_data_lake_pipeline_run" "test" { project_id = mongodbatlas_project.projectTest.project_id - name = mongodbatlas_data_lake_pipeline.pipeline.name + pipeline_name = mongodbatlas_data_lake_pipeline.pipeline.name pipeline_run_id = "DATA LAKE PIPELINE RUN ID" } ``` diff --git a/website/docs/d/data_lake_pipeline_runs.html.markdown b/website/docs/d/data_lake_pipeline_runs.html.markdown index 60db9228b7..e6bef8d43f 100644 --- a/website/docs/d/data_lake_pipeline_runs.html.markdown +++ b/website/docs/d/data_lake_pipeline_runs.html.markdown @@ -45,7 +45,7 @@ 
resource "mongodbatlas_data_lake_pipeline" "pipeline" { data "mongodbatlas_data_lake_pipeline_runs" "test" { project_id = mongodbatlas_project.projectTest.project_id - pipeline_name = "DATA LAKE PIPELINE NAME" + pipeline_name = mongodbatlas_data_lake_pipeline.pipeline.name } ``` From 7e2e3788a42027b4af6264c843d8ab2de579420b Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Tue, 23 May 2023 13:18:38 +0100 Subject: [PATCH 11/13] updated tests --- ...data_source_mongodbatlas_data_lake_pipeline_run_test.go | 7 ++++++- ...ata_source_mongodbatlas_data_lake_pipeline_runs_test.go | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go index d4a242c443..e519404708 100644 --- a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go @@ -25,7 +25,12 @@ func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) { Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet(dataSourceName, "project_id"), - resource.TestCheckResourceAttr(dataSourceName, "name", pipelineName), + resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName), + resource.TestCheckResourceAttrSet(dataSourceName, "id"), + resource.TestCheckResourceAttrSet(dataSourceName, "state"), + resource.TestCheckResourceAttrSet(dataSourceName, "phase"), + resource.TestCheckResourceAttrSet(dataSourceName, "pipeline_id"), + resource.TestCheckResourceAttrSet(dataSourceName, "dataset_name"), ), }, }, diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go index 3cbf7efffe..c78ca369e4 100644 --- 
a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go +++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go @@ -24,7 +24,7 @@ func TestAccBackupDSDataLakePipelineRuns_basic(t *testing.T) { Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipelineName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet(dataSourceName, "project_id"), - resource.TestCheckResourceAttr(dataSourceName, "name", pipelineName), + resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName), resource.TestCheckResourceAttrSet(dataSourceName, "results.#"), ), }, From aef2665f24a461c80cdec91ff2aba5b0426c8ae5 Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Wed, 24 May 2023 15:10:31 +0100 Subject: [PATCH 12/13] Update data_lake_pipeline_run.html.markdown --- website/docs/d/data_lake_pipeline_run.html.markdown | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown index 730fda9ee8..7076ebfb40 100644 --- a/website/docs/d/data_lake_pipeline_run.html.markdown +++ b/website/docs/d/data_lake_pipeline_run.html.markdown @@ -43,10 +43,15 @@ resource "mongodbatlas_data_lake_pipeline" "pipeline" { } } +data "mongodbatlas_data_lake_pipeline_runs" "pipeline_run" { + project_id = mongodbatlas_project.projectTest.project_id + name = mongodbatlas_data_lake_pipeline.pipeline.name +} + data "mongodbatlas_data_lake_pipeline_run" "test" { project_id = mongodbatlas_project.projectTest.project_id pipeline_name = mongodbatlas_data_lake_pipeline.pipeline.name - pipeline_run_id = "DATA LAKE PIPELINE RUN ID" + pipeline_run_id = mongodbatlas_data_lake_pipeline_runs.pipeline_run.results.0.pipeline_run_id # pipeline_run_id will only be returned if a schedule or ondemand run is active } ``` From 3779d769e2ae31510c2688a4343404984ec7554f Mon Sep 17 00:00:00 2001 From: andreaangiolillo Date: Wed, 31 
May 2023 09:46:06 +0100 Subject: [PATCH 13/13] Update go.mod --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 38d87d72df..ea306b1236 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/mongodb/terraform-provider-mongodbatlas -go 1.19 +go 1.20 require ( github.com/aws/aws-sdk-go v1.44.226