Skip to content

Commit 95b6e0d

Browse files
authored
Merge pull request #100 from databrickslabs/cleanup-and-tests
[CLEANUP] Making VSCode highlight only new issues
2 parents 3f56ba9 + f57c17d commit 95b6e0d

39 files changed

+376
-376
lines changed

databricks/azure_auth.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -87,10 +87,10 @@ func (a *AzureAuth) getWorkspaceID(config *service.DBApiClientConfig) error {
8787
"Authorization": "Bearer " + a.ManagementToken,
8888
}
8989
type apiVersion struct {
90-
ApiVersion string `url:"api-version"`
90+
APIVersion string `url:"api-version"`
9191
}
9292
uriPayload := apiVersion{
93-
ApiVersion: "2018-04-01",
93+
APIVersion: "2018-04-01",
9494
}
9595
var responseMap map[string]interface{}
9696
resp, err := service.PerformQuery(config, http.MethodGet, url, "2.0", headers, false, true, uriPayload, nil)

databricks/data_source_databricks_dbfs_file.go

+4-4
Original file line numberDiff line numberDiff line change
@@ -9,22 +9,22 @@ func dataSourceDBFSFile() *schema.Resource {
99
return &schema.Resource{
1010
Read: dataSourceDBFSFileRead,
1111
Schema: map[string]*schema.Schema{
12-
"path": &schema.Schema{
12+
"path": {
1313
Type: schema.TypeString,
1414
Required: true,
1515
ForceNew: true,
1616
},
17-
"limit_file_size": &schema.Schema{
17+
"limit_file_size": {
1818
Type: schema.TypeBool,
1919
Required: true,
2020
ForceNew: true,
2121
},
22-
"content": &schema.Schema{
22+
"content": {
2323
Type: schema.TypeString,
2424
Computed: true,
2525
ForceNew: true,
2626
},
27-
"file_size": &schema.Schema{
27+
"file_size": {
2828
Type: schema.TypeInt,
2929
Computed: true,
3030
},

databricks/data_source_databricks_dbfs_file_paths.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,17 +9,17 @@ func dataSourceDBFSFilePaths() *schema.Resource {
99
return &schema.Resource{
1010
Read: dataSourceDBFSFilePathsRead,
1111
Schema: map[string]*schema.Schema{
12-
"path": &schema.Schema{
12+
"path": {
1313
Type: schema.TypeString,
1414
Required: true,
1515
ForceNew: true,
1616
},
17-
"recursive": &schema.Schema{
17+
"recursive": {
1818
Type: schema.TypeBool,
1919
Required: true,
2020
ForceNew: true,
2121
},
22-
"path_list": &schema.Schema{
22+
"path_list": {
2323
Type: schema.TypeSet,
2424
Computed: true,
2525
Elem: &schema.Resource{

databricks/data_source_databricks_default_user_roles.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -25,12 +25,12 @@ func dataSourceDefaultUserRoles() *schema.Resource {
2525
return err
2626
},
2727
Schema: map[string]*schema.Schema{
28-
"default_username": &schema.Schema{
28+
"default_username": {
2929
Type: schema.TypeString,
3030
Required: true,
3131
ForceNew: true,
3232
},
33-
"roles": &schema.Schema{
33+
"roles": {
3434
Type: schema.TypeList,
3535
Computed: true,
3636
Elem: &schema.Schema{Type: schema.TypeString},

databricks/data_source_databricks_notebook.go

+6-6
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,12 @@ func dataSourceNotebook() *schema.Resource {
1212
Read: dataSourceNotebookRead,
1313
Schema: map[string]*schema.Schema{
1414

15-
"path": &schema.Schema{
15+
"path": {
1616
Type: schema.TypeString,
1717
Required: true,
1818
ForceNew: true,
1919
},
20-
"format": &schema.Schema{
20+
"format": {
2121
Type: schema.TypeString,
2222
Required: true,
2323
ForceNew: true,
@@ -27,19 +27,19 @@ func dataSourceNotebook() *schema.Resource {
2727
string(model.HTML),
2828
}, false),
2929
},
30-
"content": &schema.Schema{
30+
"content": {
3131
Type: schema.TypeString,
3232
Computed: true,
3333
},
34-
"language": &schema.Schema{
34+
"language": {
3535
Type: schema.TypeString,
3636
Computed: true,
3737
},
38-
"object_type": &schema.Schema{
38+
"object_type": {
3939
Type: schema.TypeString,
4040
Computed: true,
4141
},
42-
"object_id": &schema.Schema{
42+
"object_id": {
4343
Type: schema.TypeInt,
4444
Computed: true,
4545
},

databricks/data_source_databricks_notebook_paths.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -12,17 +12,17 @@ func dataSourceNotebookPaths() *schema.Resource {
1212
Read: dataSourceNotebookPathsRead,
1313
Schema: map[string]*schema.Schema{
1414

15-
"path": &schema.Schema{
15+
"path": {
1616
Type: schema.TypeString,
1717
Required: true,
1818
ForceNew: true,
1919
},
20-
"recursive": &schema.Schema{
20+
"recursive": {
2121
Type: schema.TypeBool,
2222
Required: true,
2323
ForceNew: true,
2424
},
25-
"notebook_path_list": &schema.Schema{
25+
"notebook_path_list": {
2626
Type: schema.TypeSet,
2727
Computed: true,
2828
Elem: &schema.Resource{

databricks/data_source_databricks_zones.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ func dataSourceClusterZones() *schema.Resource {
2424
return err
2525
},
2626
Schema: map[string]*schema.Schema{
27-
"default_zone": &schema.Schema{
27+
"default_zone": {
2828
Type: schema.TypeString,
2929
Computed: true,
3030
ForceNew: true,
3131
},
32-
"zones": &schema.Schema{
32+
"zones": {
3333
Type: schema.TypeList,
3434
Computed: true,
3535
Elem: &schema.Schema{Type: schema.TypeString},

databricks/mounts_test.go

+12-12
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,16 @@ import (
88

99
func TestValidateMountDirectory(t *testing.T) {
1010
testCases := []struct {
11-
directory string
12-
errorCount int
13-
}{
14-
{"", 0},
15-
{"/directory", 0},
16-
{"directory", 1},
17-
}
18-
for _, tc := range testCases {
19-
_, errs := ValidateMountDirectory(tc.directory, "key")
20-
21-
assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
22-
}
11+
directory string
12+
errorCount int
13+
}{
14+
{"", 0},
15+
{"/directory", 0},
16+
{"directory", 1},
17+
}
18+
for _, tc := range testCases {
19+
_, errs := ValidateMountDirectory(tc.directory, "key")
20+
21+
assert.Lenf(t, errs, tc.errorCount, "directory '%s' does not generate the expected error count", tc.directory)
22+
}
2323
}

databricks/provider.go

+8-8
Original file line numberDiff line numberDiff line change
@@ -50,30 +50,30 @@ func Provider(version string) terraform.ResourceProvider {
5050
"databricks_mws_workspaces": resourceMWSWorkspaces(),
5151
},
5252
Schema: map[string]*schema.Schema{
53-
"host": &schema.Schema{
53+
"host": {
5454
Type: schema.TypeString,
5555
Optional: true,
5656
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_HOST", nil),
5757
},
58-
"token": &schema.Schema{
58+
"token": {
5959
Type: schema.TypeString,
6060
Optional: true,
6161
Sensitive: true,
6262
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_TOKEN", nil),
6363
ConflictsWith: []string{"basic_auth"},
6464
},
65-
"basic_auth": &schema.Schema{
65+
"basic_auth": {
6666
Type: schema.TypeList,
6767
Optional: true,
6868
MaxItems: 1,
6969
Elem: &schema.Resource{
7070
Schema: map[string]*schema.Schema{
71-
"username": &schema.Schema{
71+
"username": {
7272
Type: schema.TypeString,
7373
Required: true,
7474
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_USERNAME", nil),
7575
},
76-
"password": &schema.Schema{
76+
"password": {
7777
Type: schema.TypeString,
7878
Sensitive: true,
7979
Required: true,
@@ -83,7 +83,7 @@ func Provider(version string) terraform.ResourceProvider {
8383
},
8484
ConflictsWith: []string{"token"},
8585
},
86-
"config_file": &schema.Schema{
86+
"config_file": {
8787
Type: schema.TypeString,
8888
Optional: true,
8989
DefaultFunc: schema.EnvDefaultFunc("DATABRICKS_CONFIG_FILE", "~/.databrickscfg"),
@@ -92,14 +92,14 @@ func Provider(version string) terraform.ResourceProvider {
9292
"in ~/.databrickscfg. Check https://docs.databricks.com/dev-tools/cli/index.html#set-up-authentication for docs. Config\n" +
9393
"file credetials will only be used when host/token are not provided.",
9494
},
95-
"profile": &schema.Schema{
95+
"profile": {
9696
Type: schema.TypeString,
9797
Optional: true,
9898
Default: "DEFAULT",
9999
Description: "Connection profile specified within ~/.databrickscfg. Please check\n" +
100100
"https://docs.databricks.com/dev-tools/cli/index.html#connection-profiles for documentation.",
101101
},
102-
"azure_auth": &schema.Schema{
102+
"azure_auth": {
103103
Type: schema.TypeMap,
104104
Optional: true,
105105
Elem: &schema.Resource{

databricks/resource_databricks_aws_s3_mount.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ func resourceAWSS3Mount() *schema.Resource {
1212
Delete: resourceAWSS3Delete,
1313

1414
Schema: map[string]*schema.Schema{
15-
"cluster_id": &schema.Schema{
15+
"cluster_id": {
1616
Type: schema.TypeString,
1717
Required: true,
1818
ForceNew: true,

databricks/resource_databricks_azure_adls_gen1_mount.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
1717
Delete: resourceAzureAdlsGen1Delete,
1818

1919
Schema: map[string]*schema.Schema{
20-
"cluster_id": &schema.Schema{
20+
"cluster_id": {
2121
Type: schema.TypeString,
2222
Required: true,
2323
ForceNew: true,
@@ -39,7 +39,7 @@ func resourceAzureAdlsGen1Mount() *schema.Resource {
3939
Optional: true,
4040
Computed: true,
4141
//Default: "/",
42-
ForceNew: true,
42+
ForceNew: true,
4343
ValidateFunc: ValidateMountDirectory,
4444
},
4545
"mount_name": {

databricks/resource_databricks_azure_adls_gen2_mount.go

+5-5
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
1616
Delete: resourceAzureAdlsGen2Delete,
1717

1818
Schema: map[string]*schema.Schema{
19-
"cluster_id": &schema.Schema{
19+
"cluster_id": {
2020
Type: schema.TypeString,
2121
Required: true,
2222
ForceNew: true,
@@ -32,10 +32,10 @@ func resourceAzureAdlsGen2Mount() *schema.Resource {
3232
ForceNew: true,
3333
},
3434
"directory": {
35-
Type: schema.TypeString,
36-
Optional: true,
37-
Computed: true,
38-
ForceNew: true,
35+
Type: schema.TypeString,
36+
Optional: true,
37+
Computed: true,
38+
ForceNew: true,
3939
ValidateFunc: ValidateMountDirectory,
4040
},
4141
"mount_name": {

databricks/resource_databricks_azure_adls_gen2_mount_test.go

+5-5
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ import (
1414
)
1515

1616
func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
17-
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
17+
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()
1818

1919
resource.Test(t, resource.TestCase{
2020
Providers: testAccProviders,
@@ -27,7 +27,7 @@ func TestAccAzureAdlsGen2Mount_correctly_mounts(t *testing.T) {
2727
}
2828

2929
func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
30-
terraformToApply := testAccAzureAdlsGen2Mount_correctly_mounts()
30+
terraformToApply := testAccAzureAdlsGen2MountCorrectlyMounts()
3131
var cluster model.ClusterInfo
3232

3333
resource.Test(t, resource.TestCase{
@@ -50,7 +50,7 @@ func TestAccAzureAdlsGen2Mount_cluster_deleted_correctly_mounts(t *testing.T) {
5050
}
5151

5252
func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
53-
terraformToApply := testAccAzureAdlsGen2Mount_capture_error()
53+
terraformToApply := testAccAzureAdlsGen2MountCaptureError()
5454

5555
resource.Test(t, resource.TestCase{
5656
Providers: testAccProviders,
@@ -65,7 +65,7 @@ func TestAccAzureAdlsGen2Mount_capture_error(t *testing.T) {
6565
})
6666
}
6767

68-
func testAccAzureAdlsGen2Mount_correctly_mounts() string {
68+
func testAccAzureAdlsGen2MountCorrectlyMounts() string {
6969
clientID := os.Getenv("ARM_CLIENT_ID")
7070
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
7171
tenantID := os.Getenv("ARM_TENANT_ID")
@@ -129,7 +129,7 @@ func testAccAzureAdlsGen2Mount_correctly_mounts() string {
129129
return definition
130130
}
131131

132-
func testAccAzureAdlsGen2Mount_capture_error() string {
132+
func testAccAzureAdlsGen2MountCaptureError() string {
133133
clientID := os.Getenv("ARM_CLIENT_ID")
134134
clientSecret := os.Getenv("ARM_CLIENT_SECRET")
135135
tenantID := os.Getenv("ARM_TENANT_ID")

databricks/resource_databricks_azure_blob_mount.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ func resourceAzureBlobMount() *schema.Resource {
1717
Delete: resourceAzureBlobMountDelete,
1818

1919
Schema: map[string]*schema.Schema{
20-
"cluster_id": &schema.Schema{
20+
"cluster_id": {
2121
Type: schema.TypeString,
2222
Required: true,
2323
ForceNew: true,
@@ -37,7 +37,7 @@ func resourceAzureBlobMount() *schema.Resource {
3737
Optional: true,
3838
Computed: true,
3939
//Default: "/",
40-
ForceNew: true,
40+
ForceNew: true,
4141
ValidateFunc: ValidateMountDirectory,
4242
},
4343
"mount_name": {

0 commit comments

Comments (0)