Skip to content

Commit 1485d42

Browse files
authored
Merge branch 'main' into issue-4471
2 parents 5ffe1cb + 83024cd commit 1485d42

File tree

7 files changed

+1206
-1113
lines changed

7 files changed

+1206
-1113
lines changed

NEXT_CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
### New Features and Improvements
66

77
* Support updating `options` in `databricks_catalog` ([#4476](https://github.com/databricks/terraform-provider-databricks/pull/4476)).
8+
* Increase `databricks_library` timeout from 15m to 30m.
89

910
### Bug Fixes
1011

@@ -16,4 +17,6 @@
1617

1718
### Exporter
1819

20+
* Refactor UC, SQL and SCIM objects into separate files ([#4477](https://github.com/databricks/terraform-provider-databricks/pull/4477)).
21+
1922
### Internal Changes

exporter/impl_dbsql.go

Lines changed: 279 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,279 @@
1+
package exporter
2+
3+
import (
4+
"encoding/json"
5+
"fmt"
6+
"log"
7+
"strings"
8+
9+
"github.com/databricks/databricks-sdk-go/service/dashboards"
10+
"github.com/databricks/databricks-sdk-go/service/sql"
11+
"github.com/databricks/terraform-provider-databricks/common"
12+
tf_sql "github.com/databricks/terraform-provider-databricks/sql"
13+
tf_sql_api "github.com/databricks/terraform-provider-databricks/sql/api"
14+
)
15+
16+
func listQueries(ic *importContext) error {
17+
it := ic.workspaceClient.Queries.List(ic.Context, sql.ListQueriesRequest{PageSize: 100})
18+
i := 0
19+
for it.HasNext(ic.Context) {
20+
q, err := it.Next(ic.Context)
21+
if err != nil {
22+
return err
23+
}
24+
i++
25+
if !ic.MatchesName(q.DisplayName) {
26+
continue
27+
}
28+
// TODO: look if we can create data based on the response, without calling Get
29+
ic.EmitIfUpdatedAfterIsoString(&resource{
30+
Resource: "databricks_query",
31+
ID: q.Id,
32+
Incremental: ic.incremental,
33+
}, q.UpdateTime, fmt.Sprintf("query '%s'", q.DisplayName))
34+
if i%50 == 0 {
35+
log.Printf("[INFO] Imported %d Queries", i)
36+
}
37+
}
38+
log.Printf("[INFO] Listed %d Queries", i)
39+
return nil
40+
}
41+
42+
func importQuery(ic *importContext, r *resource) error {
43+
var query tf_sql.QueryStruct
44+
s := ic.Resources["databricks_query"].Schema
45+
common.DataToStructPointer(r.Data, s, &query)
46+
if query.WarehouseId != "" {
47+
ic.Emit(&resource{
48+
Resource: "databricks_sql_endpoint",
49+
ID: query.WarehouseId,
50+
})
51+
}
52+
// emit queries specified as parameters
53+
for _, p := range query.Parameters {
54+
if p.QueryBackedValue != nil {
55+
ic.Emit(&resource{
56+
Resource: "databricks_query",
57+
ID: p.QueryBackedValue.QueryId,
58+
})
59+
}
60+
}
61+
ic.emitUserOrServicePrincipal(query.OwnerUserName)
62+
ic.emitDirectoryOrRepo(query.ParentPath)
63+
// TODO: r.AddExtraData(ParentDirectoryExtraKey, directoryPath) ?
64+
ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/sql/queries/%s", r.ID),
65+
"query_"+ic.Importables["databricks_query"].Name(ic, r.Data))
66+
if query.Catalog != "" && query.Schema != "" {
67+
ic.Emit(&resource{
68+
Resource: "databricks_schema",
69+
ID: fmt.Sprintf("%s.%s", query.Catalog, query.Schema),
70+
})
71+
}
72+
return nil
73+
}
74+
75+
func listSqlEndpoints(ic *importContext) error {
76+
it := ic.workspaceClient.Warehouses.List(ic.Context, sql.ListWarehousesRequest{})
77+
i := 0
78+
for it.HasNext(ic.Context) {
79+
q, err := it.Next(ic.Context)
80+
if err != nil {
81+
return err
82+
}
83+
if !ic.MatchesName(q.Name) {
84+
continue
85+
}
86+
ic.Emit(&resource{
87+
Resource: "databricks_sql_endpoint",
88+
ID: q.Id,
89+
})
90+
i++
91+
log.Printf("[INFO] Imported %d SQL endpoints", i)
92+
}
93+
return nil
94+
}
95+
96+
func importSqlEndpoint(ic *importContext, r *resource) error {
97+
ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/sql/warehouses/%s", r.ID),
98+
"sql_endpoint_"+ic.Importables["databricks_sql_endpoint"].Name(ic, r.Data))
99+
if ic.meAdmin {
100+
ic.Emit(&resource{
101+
Resource: "databricks_sql_global_config",
102+
ID: tf_sql.GlobalSqlConfigResourceID,
103+
})
104+
}
105+
return nil
106+
}
107+
108+
func listRedashDashboards(ic *importContext) error {
109+
qs, err := dbsqlListObjects(ic, "/preview/sql/dashboards")
110+
if err != nil {
111+
return nil
112+
}
113+
for i, q := range qs {
114+
name := q["name"].(string)
115+
if !ic.MatchesName(name) {
116+
continue
117+
}
118+
ic.EmitIfUpdatedAfterIsoString(&resource{
119+
Resource: "databricks_sql_dashboard",
120+
ID: q["id"].(string),
121+
Incremental: ic.incremental,
122+
}, q["updated_at"].(string), fmt.Sprintf("dashboard '%s'", name))
123+
log.Printf("[INFO] Imported %d of %d SQL dashboards", i+1, len(qs))
124+
}
125+
return nil
126+
}
127+
128+
// importRedashDashboard imports a legacy (Redash) SQL dashboard: it emits the
// dashboard's permissions and parent directory, then walks every widget to
// emit the dependent databricks_sql_widget, databricks_sql_visualization,
// databricks_query, and databricks_sql_endpoint resources.
//
// Decoding failures for individual widgets/visualizations/queries are logged
// and skipped rather than aborting the whole dashboard import.
func importRedashDashboard(ic *importContext, r *resource) error {
	ic.emitPermissionsIfNotIgnored(r, fmt.Sprintf("/sql/dashboards/%s", r.ID),
		"sql_dashboard_"+ic.Importables["databricks_sql_dashboard"].Name(ic, r.Data))
	dashboardID := r.ID
	// Fetch the full dashboard definition; widgets come back as raw JSON blobs.
	dashboardAPI := tf_sql.NewDashboardAPI(ic.Context, ic.Client)
	dashboard, err := dashboardAPI.Read(dashboardID)
	if err != nil {
		return err
	}

	ic.emitSqlParentDirectory(dashboard.Parent)
	for _, rv := range dashboard.Widgets {
		var widget tf_sql_api.Widget
		err = json.Unmarshal(rv, &widget)
		if err != nil {
			// Best-effort: skip widgets we cannot decode instead of failing the import.
			log.Printf("[WARN] Problems decoding widget for dashboard with ID: %s", dashboardID)
			continue
		}
		// Widget resource IDs are composite: "<dashboard id>/<widget id>".
		widgetID := dashboardID + "/" + widget.ID.String()
		ic.Emit(&resource{
			Resource: "databricks_sql_widget",
			ID:       widgetID,
		})

		if widget.VisualizationID != nil {
			var visualization tf_sql_api.Visualization
			err = json.Unmarshal(widget.Visualization, &visualization)
			if err != nil {
				log.Printf("[WARN] Problems decoding visualization for widget with ID: %s", widget.ID.String())
				continue
			}
			if len(visualization.Query) > 0 {
				var query tf_sql_api.Query
				err = json.Unmarshal(visualization.Query, &query)
				if err != nil {
					log.Printf("[WARN] Problems decoding query for visualization with ID: %s", visualization.ID.String())
					continue
				}
				// Visualization resource IDs are composite: "<query id>/<visualization id>".
				visualizationID := query.ID + "/" + visualization.ID.String()
				ic.Emit(&resource{
					Resource: "databricks_sql_visualization",
					ID:       visualizationID,
				})
				ic.Emit(&resource{
					Resource: "databricks_query",
					ID:       query.ID,
				})
				// Map the query's data source to a SQL warehouse; a miss is only a warning.
				sqlEndpointID, err := ic.getSqlEndpoint(query.DataSourceID)
				if err != nil {
					log.Printf("[WARN] Can't find SQL endpoint for data source id %s", query.DataSourceID)
				} else {
					ic.Emit(&resource{
						Resource: "databricks_sql_endpoint",
						ID:       sqlEndpointID,
					})
				}
			} else {
				log.Printf("[DEBUG] Empty query in visualization %v", visualization)
			}
		}
	}
	return nil
}
191+
192+
func listAlerts(ic *importContext) error {
193+
it := ic.workspaceClient.Alerts.List(ic.Context, sql.ListAlertsRequest{PageSize: 100})
194+
i := 0
195+
for it.HasNext(ic.Context) {
196+
a, err := it.Next(ic.Context)
197+
if err != nil {
198+
return err
199+
}
200+
i++
201+
if !ic.MatchesName(a.DisplayName) {
202+
continue
203+
}
204+
// TODO: look if we can create data based on the response, without calling Get
205+
ic.EmitIfUpdatedAfterIsoString(&resource{
206+
Resource: "databricks_alert",
207+
ID: a.Id,
208+
Incremental: ic.incremental,
209+
}, a.UpdateTime, fmt.Sprintf("alert '%s'", a.DisplayName))
210+
if i%50 == 0 {
211+
log.Printf("[INFO] Imported %d Alerts", i)
212+
}
213+
}
214+
log.Printf("[INFO] Listed %d Alerts", i)
215+
return nil
216+
}
217+
218+
func listLakeviewDashboards(ic *importContext) error {
219+
it := ic.workspaceClient.Lakeview.List(ic.Context, dashboards.ListDashboardsRequest{PageSize: 1000})
220+
i := 0
221+
for it.HasNext(ic.Context) {
222+
d, err := it.Next(ic.Context)
223+
if err != nil {
224+
return err
225+
}
226+
i++
227+
if !ic.MatchesName(d.DisplayName) {
228+
continue
229+
}
230+
// TODO: add emit for incremental mode. But this information isn't included into the List response
231+
ic.Emit(&resource{
232+
Resource: "databricks_dashboard",
233+
ID: d.DashboardId,
234+
})
235+
if i%100 == 0 {
236+
log.Printf("[INFO] Processed %d dashboards", i)
237+
}
238+
}
239+
log.Printf("[INFO] Listed %d dashboards", i)
240+
return nil
241+
}
242+
243+
func importLakeviewDashboard(ic *importContext, r *resource) error {
244+
path := r.Data.Get("path").(string)
245+
if ic.isInRepoOrGitFolder(path, false) {
246+
ic.emitRepoOrGitFolder(path, false)
247+
return nil
248+
}
249+
parts := strings.Split(path, "/")
250+
plen := len(parts)
251+
if idx := strings.Index(parts[plen-1], "."); idx != -1 {
252+
parts[plen-1] = parts[plen-1][:idx] + "_" + r.ID + parts[plen-1][idx:]
253+
} else {
254+
parts[plen-1] = parts[plen-1] + "_" + r.ID
255+
}
256+
name := fileNameNormalizationRegex.ReplaceAllString(strings.Join(parts, "/")[1:], "_")
257+
fileName, err := ic.saveFileIn("dashboards", name, []byte(r.Data.Get("serialized_dashboard").(string)))
258+
if err != nil {
259+
return err
260+
}
261+
r.Data.Set("file_path", fileName)
262+
r.Data.Set("serialized_dashboard", "")
263+
264+
ic.emitPermissionsIfNotIgnored(r, "/dashboards/"+r.ID,
265+
"dashboard_"+ic.Importables["databricks_dashboard"].Name(ic, r.Data))
266+
parentPath := r.Data.Get("parent_path").(string)
267+
if parentPath != "" && parentPath != "/" {
268+
ic.emitDirectoryOrRepo(parentPath)
269+
}
270+
warehouseId := r.Data.Get("warehouse_id").(string)
271+
if warehouseId != "" {
272+
ic.Emit(&resource{
273+
Resource: "databricks_sql_endpoint",
274+
ID: warehouseId,
275+
})
276+
}
277+
278+
return nil
279+
}

0 commit comments

Comments
 (0)