Commit e68dff1

andreaangiolillo authored and maastha committed
INTMDB-805: [Terraform] Create new TF Data Lake Pipelines Run data sources only (not resource) (#1177)
1 parent 6c690dc · commit e68dff1

9 files changed: +567 -0 lines changed

go.sum

Lines changed: 1 addition & 0 deletions
@@ -481,6 +481,7 @@ github.com/zclconf/go-cty v1.8.1/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUA
 github.com/zclconf/go-cty v1.13.1 h1:0a6bRwuiSHtAmqCqNOE+c2oHgepv0ctoxU4FUe43kwc=
 github.com/zclconf/go-cty v1.13.1/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0=
 github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
+go.mongodb.org/atlas v0.12.0/go.mod h1:wVCnHcm/7/IfTjEB6K8K35PLG70yGz8BdkRwX0oK9/M=
 go.mongodb.org/atlas v0.28.0 h1:CelAXtmiM36tdifSDwWdDH1nNbdvq0M2XfUR8208JxA=
 go.mongodb.org/atlas v0.28.0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg=
 go.mongodb.org/realm v0.1.0 h1:zJiXyLaZrznQ+Pz947ziSrDKUep39DO4SfA0Fzx8M4M=
Lines changed: 168 additions & 0 deletions
@@ -0,0 +1,168 @@
package mongodbatlas

import (
    "context"
    "fmt"
    "net/http"

    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
    matlas "go.mongodb.org/atlas/mongodbatlas"
)

const errorDataLakePipelineRunRead = "error reading MongoDB Atlas DataLake Run (%s): %s"

func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource {
    return &schema.Resource{
        ReadContext: dataSourceMongoDBAtlasDataLakeRunRead,
        Schema: map[string]*schema.Schema{
            "project_id": {
                Type:     schema.TypeString,
                Required: true,
            },
            "pipeline_name": {
                Type:     schema.TypeString,
                Required: true,
            },
            "pipeline_run_id": {
                Type:     schema.TypeString,
                Required: true,
            },
            "id": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "created_date": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "last_updated_date": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "state": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "dataset_name": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "phase": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "pipeline_id": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "snapshot_id": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "backup_frequency_type": {
                Type:     schema.TypeString,
                Computed: true,
            },
            "stats": {
                Type:     schema.TypeList,
                Computed: true,
                Elem: &schema.Resource{
                    Schema: map[string]*schema.Schema{
                        "bytes_exported": {
                            Type:     schema.TypeInt,
                            Computed: true,
                        },
                        "num_docs": {
                            Type:     schema.TypeInt,
                            Computed: true,
                        },
                    },
                },
            },
        },
    }
}

func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
    conn := meta.(*MongoDBClient).Atlas
    projectID := d.Get("project_id").(string)
    name := d.Get("pipeline_name").(string)
    pipelineRunID := d.Get("pipeline_run_id").(string)

    dataLakeRun, resp, err := conn.DataLakePipeline.GetRun(ctx, projectID, name, pipelineRunID)
    if err != nil {
        // A 404 means the run no longer exists: clear the ID rather than fail.
        if resp != nil && resp.StatusCode == http.StatusNotFound {
            d.SetId("")
            return nil
        }

        return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunRead, name, err))
    }

    if err := d.Set("id", dataLakeRun.ID); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "id", name, err))
    }

    if err := d.Set("project_id", dataLakeRun.GroupID); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "project_id", name, err))
    }

    if err := d.Set("created_date", dataLakeRun.CreatedDate); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "created_date", name, err))
    }

    if err := d.Set("last_updated_date", dataLakeRun.LastUpdatedDate); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "last_updated_date", name, err))
    }

    if err := d.Set("state", dataLakeRun.State); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "state", name, err))
    }

    if err := d.Set("phase", dataLakeRun.Phase); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "phase", name, err))
    }

    if err := d.Set("pipeline_id", dataLakeRun.PipelineID); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "pipeline_id", name, err))
    }

    if err := d.Set("dataset_name", dataLakeRun.DatasetName); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "dataset_name", name, err))
    }

    if err := d.Set("snapshot_id", dataLakeRun.SnapshotID); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "snapshot_id", name, err))
    }

    if err := d.Set("backup_frequency_type", dataLakeRun.BackupFrequencyType); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "backup_frequency_type", name, err))
    }

    if err := d.Set("stats", flattenDataLakePipelineRunStats(dataLakeRun.Stats)); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "stats", name, err))
    }

    // The state ID is a composite of the three identifying attributes.
    d.SetId(encodeStateID(map[string]string{
        "project_id":      projectID,
        "pipeline_name":   name,
        "pipeline_run_id": pipelineRunID,
    }))

    return nil
}

func flattenDataLakePipelineRunStats(datalakeRunStats *matlas.DataLakePipelineRunStats) []map[string]interface{} {
    if datalakeRunStats == nil {
        return nil
    }

    maps := make([]map[string]interface{}, 1)
    maps[0] = map[string]interface{}{
        "bytes_exported": datalakeRunStats.BytesExported,
        "num_docs":       datalakeRunStats.NumDocs,
    }
    return maps
}
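For reference, a minimal Terraform configuration exercising this new data source could look like the sketch below. The data source type mongodbatlas_data_lake_pipeline_run and its attribute names come from the schema above (and match the acceptance test that follows); the project, pipeline, and run ID values are placeholders, and a configured mongodbatlas provider is assumed.

data "mongodbatlas_data_lake_pipeline_run" "example" {
  project_id      = "<PROJECT-ID>"    # placeholder Atlas project ID
  pipeline_name   = "<PIPELINE-NAME>" # placeholder pipeline name
  pipeline_run_id = "<RUN-ID>"        # placeholder run ID
}

# Computed attributes such as state, phase, and stats can then be read.
output "pipeline_run_state" {
  value = data.mongodbatlas_data_lake_pipeline_run.example.state
}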
Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
package mongodbatlas

import (
    "fmt"
    "os"
    "testing"

    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) {
    testCheckDataLakePipelineRun(t)
    var (
        dataSourceName = "data.mongodbatlas_data_lake_pipeline_run.test"
        projectID      = os.Getenv("MONGODB_ATLAS_PROJECT_ID")
        pipelineName   = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME")
        runID          = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID")
    )

    resource.ParallelTest(t, resource.TestCase{
        PreCheck:          func() { testAccPreCheck(t) },
        ProviderFactories: testAccProviderFactories,
        Steps: []resource.TestStep{
            {
                Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID),
                Check: resource.ComposeTestCheckFunc(
                    resource.TestCheckResourceAttrSet(dataSourceName, "project_id"),
                    resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName),
                    resource.TestCheckResourceAttrSet(dataSourceName, "id"),
                    resource.TestCheckResourceAttrSet(dataSourceName, "state"),
                    resource.TestCheckResourceAttrSet(dataSourceName, "phase"),
                    resource.TestCheckResourceAttrSet(dataSourceName, "pipeline_id"),
                    resource.TestCheckResourceAttrSet(dataSourceName, "dataset_name"),
                ),
            },
        },
    })
}

func testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID string) string {
    return fmt.Sprintf(`
data "mongodbatlas_data_lake_pipeline_run" "test" {
  project_id      = %[1]q
  pipeline_name   = %[2]q
  pipeline_run_id = %[3]q
}
`, projectID, pipelineName, runID)
}
Lines changed: 134 additions & 0 deletions
@@ -0,0 +1,134 @@
package mongodbatlas

import (
    "context"
    "fmt"

    "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/id"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
    matlas "go.mongodb.org/atlas/mongodbatlas"
)

const errorDataLakePipelineRunList = "error reading MongoDB Atlas DataLake Runs (%s): %s"

func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource {
    return &schema.Resource{
        ReadContext: dataSourceMongoDBAtlasDataLakeRunsRead,
        Schema: map[string]*schema.Schema{
            "project_id": {
                Type:     schema.TypeString,
                Required: true,
            },
            "pipeline_name": {
                Type:     schema.TypeString,
                Required: true,
            },
            "results": {
                Type:     schema.TypeList,
                Computed: true,
                Elem: &schema.Resource{
                    Schema: map[string]*schema.Schema{
                        "pipeline_run_id": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "id": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "created_date": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "last_updated_date": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "state": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "phase": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "pipeline_id": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "snapshot_id": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "backup_frequency_type": {
                            Type:     schema.TypeString,
                            Computed: true,
                        },
                        "stats": {
                            Type:     schema.TypeList,
                            Computed: true,
                            Elem: &schema.Resource{
                                Schema: map[string]*schema.Schema{
                                    "bytes_exported": {
                                        Type:     schema.TypeInt,
                                        Computed: true,
                                    },
                                    "num_docs": {
                                        Type:     schema.TypeInt,
                                        Computed: true,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }
}

func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
    conn := meta.(*MongoDBClient).Atlas
    projectID := d.Get("project_id").(string)
    name := d.Get("pipeline_name").(string)

    dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name)
    if err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, projectID, err))
    }

    if err := d.Set("results", flattenDataLakePipelineRunResult(dataLakeRuns.Results)); err != nil {
        return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "results", projectID, err))
    }

    // A list has no natural ID of its own, so store a generated unique ID in state.
    d.SetId(id.UniqueId())

    return nil
}

func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePipelineRun) []map[string]interface{} {
    var results []map[string]interface{}

    if len(datalakePipelineRuns) == 0 {
        return results
    }

    results = make([]map[string]interface{}, len(datalakePipelineRuns))

    for k, run := range datalakePipelineRuns {
        results[k] = map[string]interface{}{
            "id":                    run.ID,
            "created_date":          run.CreatedDate,
            "last_updated_date":     run.LastUpdatedDate,
            "state":                 run.State,
            "phase":                 run.Phase,
            "pipeline_id":           run.PipelineID,
            "snapshot_id":           run.SnapshotID,
            "backup_frequency_type": run.BackupFrequencyType,
            "stats":                 flattenDataLakePipelineRunStats(run.Stats),
        }
    }

    return results
}
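A similar sketch for the plural data source, assuming it is registered under the type name mongodbatlas_data_lake_pipeline_runs (the provider registration is not part of this diff); it returns every run of a pipeline through the computed results list:

data "mongodbatlas_data_lake_pipeline_runs" "example" {
  project_id    = "<PROJECT-ID>"    # placeholder Atlas project ID
  pipeline_name = "<PIPELINE-NAME>" # placeholder pipeline name
}

# Each element of results exposes the per-run computed fields from the schema.
output "pipeline_run_ids" {
  value = [for run in data.mongodbatlas_data_lake_pipeline_runs.example.results : run.id]
}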
