diff --git a/go.mod b/go.mod
index d7d8129587..ea306b1236 100644
--- a/go.mod
+++ b/go.mod
@@ -13,7 +13,7 @@ require (
 	github.com/mwielbut/pointy v1.1.0
 	github.com/spf13/cast v1.5.0
 	github.com/zclconf/go-cty v1.13.1
-	go.mongodb.org/atlas v0.26.2-0.20230518140002-aee9ff7710a0
+	go.mongodb.org/atlas v0.28.0
 	go.mongodb.org/realm v0.1.0
 	golang.org/x/exp v0.0.0-20221208152030-732eee02a75a
 )
diff --git a/go.sum b/go.sum
index 484b82298d..852d0b4210 100644
--- a/go.sum
+++ b/go.sum
@@ -482,10 +482,8 @@ github.com/zclconf/go-cty v1.13.1 h1:0a6bRwuiSHtAmqCqNOE+c2oHgepv0ctoxU4FUe43kwc
 github.com/zclconf/go-cty v1.13.1/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0=
 github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
 go.mongodb.org/atlas v0.12.0/go.mod h1:wVCnHcm/7/IfTjEB6K8K35PLG70yGz8BdkRwX0oK9/M=
-go.mongodb.org/atlas v0.26.2-0.20230517121323-88420f887c30 h1:bsNzJPOJ7T7An+6HLbyIyBioE9LnJneLqD63uBK+rpY=
-go.mongodb.org/atlas v0.26.2-0.20230517121323-88420f887c30/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg=
-go.mongodb.org/atlas v0.26.2-0.20230518140002-aee9ff7710a0 h1:e6pAkoOnAOCjQHzRHX6RopDvRF09apl84Vpltfn0HYA=
-go.mongodb.org/atlas v0.26.2-0.20230518140002-aee9ff7710a0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg=
+go.mongodb.org/atlas v0.28.0 h1:CelAXtmiM36tdifSDwWdDH1nNbdvq0M2XfUR8208JxA=
+go.mongodb.org/atlas v0.28.0/go.mod h1:L4BKwVx/OeEhOVjCSdgo90KJm4469iv7ZLzQms/EPTg=
 go.mongodb.org/realm v0.1.0 h1:zJiXyLaZrznQ+Pz947ziSrDKUep39DO4SfA0Fzx8M4M=
 go.mongodb.org/realm v0.1.0/go.mod h1:4Vj6iy+Puo1TDERcoh4XZ+pjtwbOzPpzqy3Cwe8ZmDM=
 go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go
new file mode 100644
index 0000000000..66603e886c
--- /dev/null
+++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run.go
@@ -0,0 +1,168 @@
+package mongodbatlas
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	matlas "go.mongodb.org/atlas/mongodbatlas"
+)
+
+const errorDataLakePipelineRunRead = "error reading MongoDB Atlas DataLake Run (%s): %s"
+
+func dataSourceMongoDBAtlasDataLakePipelineRun() *schema.Resource {
+	return &schema.Resource{
+		ReadContext: dataSourceMongoDBAtlasDataLakeRunRead,
+		Schema: map[string]*schema.Schema{
+			"project_id": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"pipeline_name": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"pipeline_run_id": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"id": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"created_date": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"last_updated_date": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"state": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"dataset_name": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"phase": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"pipeline_id": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"snapshot_id": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"backup_frequency_type": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"stats": {
+				Type:     schema.TypeList,
+				Computed: true,
+				MaxItems: 0,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"bytes_exported": {
+							Type:     schema.TypeInt,
+							Computed: true,
+						},
+						"num_docs": {
+							Type:     schema.TypeInt,
+							Computed: true,
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
+func dataSourceMongoDBAtlasDataLakeRunRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+	conn := meta.(*MongoDBClient).Atlas
+	projectID := d.Get("project_id").(string)
+	name := d.Get("pipeline_name").(string)
+	pipelineRunID := d.Get("pipeline_run_id").(string)
+
+	dataLakeRun, resp, err := conn.DataLakePipeline.GetRun(ctx, projectID, name, pipelineRunID)
+	if err != nil {
+		if resp != nil && resp.StatusCode == http.StatusNotFound {
+			d.SetId("")
+			return nil
+		}
+
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunRead, name, err))
+	}
+
+	if err := d.Set("id", dataLakeRun.ID); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "id", name, err))
+	}
+
+	if err := d.Set("project_id", dataLakeRun.GroupID); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "project_id", name, err))
+	}
+
+	if err := d.Set("created_date", dataLakeRun.CreatedDate); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "created_date", name, err))
+	}
+
+	if err := d.Set("last_updated_date", dataLakeRun.LastUpdatedDate); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "last_updated_date", name, err))
+	}
+
+	if err := d.Set("state", dataLakeRun.State); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "state", name, err))
+	}
+
+	if err := d.Set("phase", dataLakeRun.Phase); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "phase", name, err))
+	}
+
+	if err := d.Set("pipeline_id", dataLakeRun.PipelineID); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "pipeline_id", name, err))
+	}
+
+	if err := d.Set("dataset_name", dataLakeRun.DatasetName); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "dataset_name", name, err))
+	}
+
+	if err := d.Set("snapshot_id", dataLakeRun.SnapshotID); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "snapshot_id", name, err))
+	}
+
+	if err := d.Set("backup_frequency_type", dataLakeRun.BackupFrequencyType); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "backup_frequency_type", name, err))
+	}
+
+	if err := d.Set("stats", flattenDataLakePipelineRunStats(dataLakeRun.Stats)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "stats", name, err))
+	}
+
+	d.SetId(encodeStateID(map[string]string{
+		"project_id":      projectID,
+		"pipeline_name":   name,
+		"pipeline_run_id": pipelineRunID,
+	}))
+
+	return nil
+}
+
+func flattenDataLakePipelineRunStats(datalakeRunStats *matlas.DataLakePipelineRunStats) []map[string]interface{} {
+	if datalakeRunStats == nil {
+		return nil
+	}
+
+	maps := make([]map[string]interface{}, 1)
+	maps[0] = map[string]interface{}{
+		"bytes_exported": datalakeRunStats.BytesExported,
+		"num_docs":       datalakeRunStats.NumDocs,
+	}
+	return maps
+}
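For context, the Atlas Go client call this data source wraps can also be exercised standalone. A minimal sketch, assuming programmatic API keys with the digest transport from `github.com/mongodb-forks/digest`; the env variable names and placeholder IDs are illustrative, not values the provider itself reads:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"os"

	"github.com/mongodb-forks/digest"
	matlas "go.mongodb.org/atlas/mongodbatlas"
)

func main() {
	// Atlas Admin API keys; these env variable names are illustrative.
	transport := digest.NewTransport(os.Getenv("ATLAS_PUBLIC_KEY"), os.Getenv("ATLAS_PRIVATE_KEY"))
	httpClient, err := transport.Client()
	if err != nil {
		log.Fatal(err)
	}
	client := matlas.NewClient(httpClient)

	// Same call and not-found handling as dataSourceMongoDBAtlasDataLakeRunRead above.
	run, resp, err := client.DataLakePipeline.GetRun(context.Background(),
		"<PROJECT-ID>", "<PIPELINE-NAME>", "<PIPELINE-RUN-ID>")
	if err != nil {
		if resp != nil && resp.StatusCode == http.StatusNotFound {
			fmt.Println("pipeline run no longer exists")
			return
		}
		log.Fatal(err)
	}

	fmt.Println(run.ID, run.State, run.Phase)
}
```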
diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go
new file mode 100644
index 0000000000..e519404708
--- /dev/null
+++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_run_test.go
@@ -0,0 +1,49 @@
+package mongodbatlas
+
+import (
+	"fmt"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAccBackupDSDataLakePipelineRun_basic(t *testing.T) {
+	testCheckDataLakePipelineRun(t)
+	var (
+		dataSourceName = "data.mongodbatlas_data_lake_pipeline_run.test"
+		projectID      = os.Getenv("MONGODB_ATLAS_PROJECT_ID")
+		pipelineName   = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME")
+		runID          = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID")
+	)
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:          func() { testAccPreCheck(t) },
+		ProviderFactories: testAccProviderFactories,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttrSet(dataSourceName, "project_id"),
+					resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName),
+					resource.TestCheckResourceAttrSet(dataSourceName, "id"),
+					resource.TestCheckResourceAttrSet(dataSourceName, "state"),
+					resource.TestCheckResourceAttrSet(dataSourceName, "phase"),
+					resource.TestCheckResourceAttrSet(dataSourceName, "pipeline_id"),
+					resource.TestCheckResourceAttrSet(dataSourceName, "dataset_name"),
+				),
+			},
+		},
+	})
+}
+
+func testAccMongoDBAtlasDataLakeDataSourcePipelineRunConfig(projectID, pipelineName, runID string) string {
+	return fmt.Sprintf(`
+
+data "mongodbatlas_data_lake_pipeline_run" "test" {
+  project_id      = %[1]q
+  pipeline_name   = %[2]q
+  pipeline_run_id = %[3]q
+}
+	`, projectID, pipelineName, runID)
+}
diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go
new file mode 100644
index 0000000000..d8c0d715df
--- /dev/null
+++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs.go
@@ -0,0 +1,140 @@
+package mongodbatlas
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/id"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	matlas "go.mongodb.org/atlas/mongodbatlas"
+)
+
+const errorDataLakePipelineRunList = "error reading MongoDB Atlas DataLake Runs (%s): %s"
+
+func dataSourceMongoDBAtlasDataLakePipelineRuns() *schema.Resource {
+	return &schema.Resource{
+		ReadContext: dataSourceMongoDBAtlasDataLakeRunsRead,
+		Schema: map[string]*schema.Schema{
+			"project_id": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"pipeline_name": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"results": {
+				Type:     schema.TypeList,
+				Computed: true,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"pipeline_run_id": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"id": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"created_date": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"last_updated_date": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"state": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"dataset_name": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"phase": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"pipeline_id": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"snapshot_id": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"backup_frequency_type": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"stats": {
+							Type:     schema.TypeList,
+							Computed: true,
+							MaxItems: 0,
+							Elem: &schema.Resource{
+								Schema: map[string]*schema.Schema{
+									"bytes_exported": {
+										Type:     schema.TypeInt,
+										Computed: true,
+									},
+									"num_docs": {
+										Type:     schema.TypeInt,
+										Computed: true,
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
+func dataSourceMongoDBAtlasDataLakeRunsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+	conn := meta.(*MongoDBClient).Atlas
+	projectID := d.Get("project_id").(string)
+	name := d.Get("pipeline_name").(string)
+
+	dataLakeRuns, _, err := conn.DataLakePipeline.ListRuns(ctx, projectID, name)
+	if err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakePipelineRunList, projectID, err))
+	}
+
+	if err := d.Set("results", flattenDataLakePipelineRunResult(dataLakeRuns.Results)); err != nil {
+		return diag.FromErr(fmt.Errorf(errorDataLakeSetting, "results", projectID, err))
+	}
+
+	d.SetId(id.UniqueId())
+
+	return nil
+}
+
+func flattenDataLakePipelineRunResult(datalakePipelineRuns []*matlas.DataLakePipelineRun) []map[string]interface{} {
+	var results []map[string]interface{}
+
+	if len(datalakePipelineRuns) == 0 {
+		return results
+	}
+
+	results = make([]map[string]interface{}, len(datalakePipelineRuns))
+
+	for k, run := range datalakePipelineRuns {
+		results[k] = map[string]interface{}{
+			"id":                    run.ID,
+			"pipeline_run_id":       run.ID,
+			"created_date":          run.CreatedDate,
+			"last_updated_date":     run.LastUpdatedDate,
+			"state":                 run.State,
+			"dataset_name":          run.DatasetName,
+			"pipeline_id":           run.PipelineID,
+			"snapshot_id":           run.SnapshotID,
+			"backup_frequency_type": run.BackupFrequencyType,
+			"stats":                 flattenDataLakePipelineRunStats(run.Stats),
+		}
+	}
+
+	return results
+}
diff --git a/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go
new file mode 100644
index 0000000000..c78ca369e4
--- /dev/null
+++ b/mongodbatlas/data_source_mongodbatlas_data_lake_pipeline_runs_test.go
@@ -0,0 +1,43 @@
+package mongodbatlas
+
+import (
+	"fmt"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAccBackupDSDataLakePipelineRuns_basic(t *testing.T) {
+	testCheckDataLakePipelineRuns(t)
+	var (
+		dataSourceName = "data.mongodbatlas_data_lake_pipeline_runs.test"
+		projectID      = os.Getenv("MONGODB_ATLAS_PROJECT_ID")
+		pipelineName   = os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME")
+	)
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:          func() { testAccPreCheck(t) },
+		ProviderFactories: testAccProviderFactories,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipelineName),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttrSet(dataSourceName, "project_id"),
+					resource.TestCheckResourceAttr(dataSourceName, "pipeline_name", pipelineName),
+					resource.TestCheckResourceAttrSet(dataSourceName, "results.#"),
+				),
+			},
+		},
+	})
+}
+
+func testAccMongoDBAtlasDataLakeDataSourcePipelineRunsConfig(projectID, pipelineName string) string {
+	return fmt.Sprintf(`
+
+data "mongodbatlas_data_lake_pipeline_runs" "test" {
+  project_id    = %[1]q
+  pipeline_name = %[2]q
+}
+	`, projectID, pipelineName)
+}
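The flatten helpers above are pure functions, so they are straightforward to unit test. A minimal sketch of such a test, not part of this change; the test name and the sample field values are hypothetical:

```go
package mongodbatlas

import (
	"testing"

	matlas "go.mongodb.org/atlas/mongodbatlas"
)

func TestFlattenDataLakePipelineRunResult(t *testing.T) {
	// Empty input should flatten to a nil slice.
	if got := flattenDataLakePipelineRunResult(nil); got != nil {
		t.Fatalf("expected nil for empty input, got %v", got)
	}

	runs := []*matlas.DataLakePipelineRun{
		{
			ID:         "6328c7dd0d3a8c3ef5b62621", // hypothetical run ID
			State:      "DONE",
			PipelineID: "6328c7dd0d3a8c3ef5b62620", // hypothetical pipeline ID
			Stats:      &matlas.DataLakePipelineRunStats{BytesExported: 140, NumDocs: 2},
		},
	}

	results := flattenDataLakePipelineRunResult(runs)
	if len(results) != 1 {
		t.Fatalf("expected 1 result, got %d", len(results))
	}
	if results[0]["state"] != "DONE" {
		t.Errorf("unexpected state: %v", results[0]["state"])
	}
	stats, ok := results[0]["stats"].([]map[string]interface{})
	if !ok || len(stats) != 1 {
		t.Fatalf("unexpected stats shape: %v", results[0]["stats"])
	}
}
```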
diff --git a/mongodbatlas/provider.go b/mongodbatlas/provider.go
index 1307dffc76..904d3f4a1a 100644
--- a/mongodbatlas/provider.go
+++ b/mongodbatlas/provider.go
@@ -196,6 +196,8 @@ func getDataSourcesMap() map[string]*schema.Resource {
 		"mongodbatlas_search_indexes":          dataSourceMongoDBAtlasSearchIndexes(),
 		"mongodbatlas_data_lake":               dataSourceMongoDBAtlasDataLake(),
 		"mongodbatlas_data_lakes":              dataSourceMongoDBAtlasDataLakes(),
+		"mongodbatlas_data_lake_pipeline_run":  dataSourceMongoDBAtlasDataLakePipelineRun(),
+		"mongodbatlas_data_lake_pipeline_runs": dataSourceMongoDBAtlasDataLakePipelineRuns(),
 		"mongodbatlas_data_lake_pipeline":      dataSourceMongoDBAtlasDataLakePipeline(),
 		"mongodbatlas_data_lake_pipelines":     dataSourceMongoDBAtlasDataLakePipelines(),
 		"mongodbatlas_event_trigger":           dataSourceMongoDBAtlasEventTrigger(),
diff --git a/mongodbatlas/provider_test.go b/mongodbatlas/provider_test.go
index 429f459c19..0335be2f92 100644
--- a/mongodbatlas/provider_test.go
+++ b/mongodbatlas/provider_test.go
@@ -179,6 +179,19 @@ func SkipTestExtCred(tb testing.TB) {
 	}
 }
 
+func testCheckDataLakePipelineRun(tb testing.TB) {
+	if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID") == "" {
+		tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_RUN_ID` must be set for Data Lake Pipeline Run acceptance testing")
+	}
+	testCheckDataLakePipelineRuns(tb)
+}
+
+func testCheckDataLakePipelineRuns(tb testing.TB) {
+	if os.Getenv("MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME") == "" {
+		tb.Skip("`MONGODB_ATLAS_DATA_LAKE_PIPELINE_NAME` must be set for Data Lake Pipeline acceptance testing")
+	}
+}
+
 func testCheckTeamsIds(tb testing.TB) {
 	if os.Getenv("MONGODB_ATLAS_TEAMS_IDS") == "" {
 		tb.Skip("`MONGODB_ATLAS_TEAMS_IDS` must be set for Projects acceptance testing")
diff --git a/website/docs/d/data_lake_pipeline_run.html.markdown b/website/docs/d/data_lake_pipeline_run.html.markdown
new file mode 100644
index 0000000000..7076ebfb40
--- /dev/null
+++ b/website/docs/d/data_lake_pipeline_run.html.markdown
@@ -0,0 +1,81 @@
+---
+layout: "mongodbatlas"
+page_title: "MongoDB Atlas: data_lake_pipeline_run"
+sidebar_current: "docs-mongodbatlas-datasource-data-lake-pipeline-run"
+description: |-
+    Describes a Data Lake Pipeline Run.
+---
+
+# Data Source: mongodbatlas_data_lake_pipeline_run
+
+`mongodbatlas_data_lake_pipeline_run` describes a Data Lake Pipeline Run.
+
+
+-> **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
+
+## Example Usage
+
+```terraform
+resource "mongodbatlas_data_lake_pipeline" "pipeline" {
+  // assuming we've already set up the project and cluster in another block
+  project_id = mongodbatlas_project.projectTest.project_id
+  name       = "DataLakePipelineName"
+  sink {
+    type = "DLS"
+    partition_fields {
+      name  = "access"
+      order = 0
+    }
+  }
+  source {
+    type            = "ON_DEMAND_CPS"
+    cluster_name    = mongodbatlas_cluster.clusterTest.name
+    database_name   = "sample_airbnb"
+    collection_name = "listingsAndReviews"
+  }
+  transformations {
+    field = "test"
+    type  = "EXCLUDE"
+  }
+  transformations {
+    field = "test22"
+    type  = "EXCLUDE"
+  }
+}
+
+data "mongodbatlas_data_lake_pipeline_runs" "pipeline_run" {
+  project_id    = mongodbatlas_project.projectTest.project_id
+  pipeline_name = mongodbatlas_data_lake_pipeline.pipeline.name
+}
+
+data "mongodbatlas_data_lake_pipeline_run" "test" {
+  project_id      = mongodbatlas_project.projectTest.project_id
+  pipeline_name   = mongodbatlas_data_lake_pipeline.pipeline.name
+  pipeline_run_id = data.mongodbatlas_data_lake_pipeline_runs.pipeline_run.results.0.pipeline_run_id # pipeline_run_id is only returned while a scheduled or on-demand run is active
+}
+```
+
+## Argument Reference
+
+* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
+* `pipeline_name` - (Required) Human-readable label that identifies the Data Lake Pipeline.
+* `pipeline_run_id` - (Required) Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
+
+## Attributes Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
+* `created_date` - Timestamp that indicates when the pipeline run was created.
+* `last_updated_date` - Timestamp that indicates the last time the pipeline run was updated.
+* `state` - State of the pipeline run.
+* `dataset_name` - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
+* `phase` - Processing phase of the Data Lake Pipeline.
+* `pipeline_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
+* `snapshot_id` - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
+* `backup_frequency_type` - Backup schedule interval of the Data Lake Pipeline.
+* `stats` - Runtime statistics for this Data Lake Pipeline run.
+  * `bytes_exported` - Total data size in bytes exported for this pipeline run.
+  * `num_docs` - Number of documents ingested for this pipeline run.
+
+See [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Data-Lake-Pipelines/operation/getPipelineRun) Documentation for more information.
\ No newline at end of file
diff --git a/website/docs/d/data_lake_pipeline_runs.html.markdown b/website/docs/d/data_lake_pipeline_runs.html.markdown
new file mode 100644
index 0000000000..e6bef8d43f
--- /dev/null
+++ b/website/docs/d/data_lake_pipeline_runs.html.markdown
@@ -0,0 +1,76 @@
+---
+layout: "mongodbatlas"
+page_title: "MongoDB Atlas: data_lake_pipeline_runs"
+sidebar_current: "docs-mongodbatlas-datasource-data-lake-pipeline-runs"
+description: |-
+    Describes Data Lake Pipeline Runs.
+---
+
+# Data Source: mongodbatlas_data_lake_pipeline_runs
+
+`mongodbatlas_data_lake_pipeline_runs` describes Data Lake Pipeline Runs.
+
+
+-> **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
+
+## Example Usage
+
+```terraform
+resource "mongodbatlas_data_lake_pipeline" "pipeline" {
+  // assuming we've already set up the project and cluster in another block
+  project_id = mongodbatlas_project.projectTest.project_id
+  name       = "DataLakePipelineName"
+  sink {
+    type = "DLS"
+    partition_fields {
+      name  = "access"
+      order = 0
+    }
+  }
+  source {
+    type            = "ON_DEMAND_CPS"
+    cluster_name    = mongodbatlas_cluster.clusterTest.name
+    database_name   = "sample_airbnb"
+    collection_name = "listingsAndReviews"
+  }
+  transformations {
+    field = "test"
+    type  = "EXCLUDE"
+  }
+  transformations {
+    field = "test22"
+    type  = "EXCLUDE"
+  }
+}
+
+data "mongodbatlas_data_lake_pipeline_runs" "test" {
+  project_id    = mongodbatlas_project.projectTest.project_id
+  pipeline_name = mongodbatlas_data_lake_pipeline.pipeline.name
+}
+```
+
+## Argument Reference
+
+* `project_id` - (Required) Unique 24-hexadecimal digit string that identifies your project.
+* `pipeline_name` - (Required) Human-readable label that identifies the Data Lake Pipeline.
+
+## Attributes Reference
+* `results` - A list in which each element represents a Data Lake Pipeline Run.
+
+### Data Lake Pipeline Run
+
+* `id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
+* `pipeline_run_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
+* `created_date` - Timestamp that indicates when the pipeline run was created.
+* `last_updated_date` - Timestamp that indicates the last time the pipeline run was updated.
+* `state` - State of the pipeline run.
+* `dataset_name` - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
+* `phase` - Processing phase of the Data Lake Pipeline.
+* `pipeline_id` - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
+* `snapshot_id` - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
+* `backup_frequency_type` - Backup schedule interval of the Data Lake Pipeline.
+* `stats` - Runtime statistics for this Data Lake Pipeline run.
+  * `bytes_exported` - Total data size in bytes exported for this pipeline run.
+  * `num_docs` - Number of documents ingested for this pipeline run.
+
+See [MongoDB Atlas API](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Data-Lake-Pipelines/operation/listPipelineRuns) Documentation for more information.
\ No newline at end of file
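As the run docs note, `pipeline_run_id` is only available while a run exists, so callers often have to discover it first. A hedged sketch of doing that directly with `DataLakePipeline.ListRuns`, the same call the plural data source wraps; client construction is as in the earlier sketch, and the env variable names and placeholder IDs are illustrative:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/mongodb-forks/digest"
	matlas "go.mongodb.org/atlas/mongodbatlas"
)

func main() {
	// Atlas Admin API keys; these env variable names are illustrative.
	transport := digest.NewTransport(os.Getenv("ATLAS_PUBLIC_KEY"), os.Getenv("ATLAS_PRIVATE_KEY"))
	httpClient, err := transport.Client()
	if err != nil {
		log.Fatal(err)
	}
	client := matlas.NewClient(httpClient)

	// Mirrors the `results.0.pipeline_run_id` access in the docs example above.
	runs, _, err := client.DataLakePipeline.ListRuns(context.Background(),
		"<PROJECT-ID>", "<PIPELINE-NAME>")
	if err != nil {
		log.Fatal(err)
	}
	if len(runs.Results) == 0 {
		fmt.Println("no runs recorded for this pipeline")
		return
	}
	fmt.Println("first listed run:", runs.Results[0].ID, runs.Results[0].State)
}
```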