Commit 326a90d

tombuildsstuff authored and hc-github-team-tf-azure committed
d/hdinsight_cluster: fixing an issue where the tier and kind were lower-cased due to an issue in the API Spec
The API Specification defines `tier` as a Constant which can be re-cased - however `kind` is defined as a plain string, so it can't be. I've opened hashicorp/pandora#3404 (to work around it) and Azure/azure-rest-api-specs#26838 (to fix it) - once those are threaded through, both of these should be normalized.
1 parent 1eb7b44 commit 326a90d
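
Why one value can be normalized and the other can't: when a field is modelled as a constant in the spec, the generated SDK has a fixed set of canonical values it can re-case the API's response against; a field modelled as a plain string has no such reference set, so whatever casing the API returns is what the provider sees. A minimal, purely illustrative sketch of that difference (the `Tier` type and `normalizeTier` helper below are hypothetical, not the generated SDK's actual code):

```go
package main

import (
	"fmt"
	"strings"
)

// Tier is modelled as a constant in the API spec, so generated code has a
// fixed set of canonical values it can re-case raw API responses against.
type Tier string

const (
	TierStandard Tier = "Standard"
	TierPremium  Tier = "Premium"
)

// normalizeTier re-cases a raw value to the canonical constant casing,
// passing unknown values through untouched.
func normalizeTier(raw string) Tier {
	for _, known := range []Tier{TierStandard, TierPremium} {
		if strings.EqualFold(raw, string(known)) {
			return known
		}
	}
	return Tier(raw)
}

func main() {
	fmt.Println(normalizeTier("STANDARD")) // prints "Standard" - re-cased

	// `kind` is only a string in the spec, so there is no known set of
	// values to normalize against; it surfaces exactly as the API sent it.
	kind := "HADOOP"
	fmt.Println(kind) // prints "HADOOP"
}
```

Until the linked spec and pandora changes land, `kind` therefore surfaces exactly as the API returns it (e.g. `HADOOP`), which is what the data source and tests below now reflect.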

2 files changed: +13, -18 lines

internal/services/hdinsight/hdinsight_cluster_data_source.go

Lines changed: 1 addition & 6 deletions
@@ -5,7 +5,6 @@ package hdinsight
 
 import (
 	"fmt"
-	"strings"
 	"time"
 
 	"github.com/hashicorp/go-azure-helpers/lang/pointer"
@@ -157,11 +156,7 @@ func dataSourceHDInsightClusterRead(d *pluginsdk.ResourceData, meta interface{})
 	d.Set("tls_min_version", props.MinSupportedTlsVersion)
 
 	d.Set("component_versions", flattenHDInsightsDataSourceComponentVersions(props.ClusterDefinition.ComponentVersion))
-	kind := ""
-	if props.ClusterDefinition.Kind != nil {
-		kind = strings.ToLower(*props.ClusterDefinition.Kind) // TODO: investigate OOB why this is ToLowered, missing Constants?
-	}
-	d.Set("kind", kind)
+	d.Set("kind", pointer.From(props.ClusterDefinition.Kind))
 	if err := d.Set("gateway", FlattenHDInsightsConfigurations(configuration, d)); err != nil {
 		return fmt.Errorf("flattening `gateway`: %+v", err)
 	}
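
The removed block used to guard against a nil `Kind` and lower-case it; the replacement leans on `pointer.From` from `go-azure-helpers`, which dereferences the pointer and falls back to the type's zero value when it is nil, so the value now passes through with its original casing. A rough, self-contained approximation of that behaviour (illustrative only, not the library's source):

```go
package main

import "fmt"

// from approximates pointer.From: dereference a pointer, returning the
// type's zero value when the pointer is nil.
func from[T any](input *T) T {
	if input == nil {
		var zero T
		return zero
	}
	return *input
}

func main() {
	kind := "HADOOP"
	fmt.Println(from(&kind)) // "HADOOP" - no lower-casing applied

	var missing *string
	fmt.Printf("%q\n", from(missing)) // "" - nil collapses to the zero value
}
```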

internal/services/hdinsight/hdinsight_cluster_data_source_test.go

Lines changed: 12 additions & 12 deletions
@@ -20,8 +20,8 @@ func TestAccDataSourceHDInsightCluster_hadoop(t *testing.T) {
 		{
 			Config: r.hadoop(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("hadoop"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("HADOOP"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
@@ -37,8 +37,8 @@ func TestAccDataSourceHDInsightCluster_hbase(t *testing.T) {
 		{
 			Config: r.hbase(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("hbase"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("HBASE"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
@@ -54,8 +54,8 @@ func TestAccDataSourceHDInsightCluster_interactiveQuery(t *testing.T) {
 		{
 			Config: r.interactiveQuery(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("interactivehive"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("INTERACTIVEHIVE"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
@@ -71,8 +71,8 @@ func TestAccDataSourceHDInsightCluster_kafka(t *testing.T) {
 		{
 			Config: r.kafka(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("kafka"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("KAFKA"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
@@ -88,8 +88,8 @@ func TestAccDataSourceHDInsightCluster_kafkaWithRestProxy(t *testing.T) {
 		{
 			Config: r.kafkaWithRestProxy(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("kafka"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("KAFKA"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
@@ -106,8 +106,8 @@ func TestAccDataSourceHDInsightCluster_spark(t *testing.T) {
 		{
 			Config: r.spark(data),
 			Check: acceptance.ComposeTestCheckFunc(
-				check.That(data.ResourceName).Key("kind").HasValue("spark"),
-				check.That(data.ResourceName).Key("tier").HasValue("standard"),
+				check.That(data.ResourceName).Key("kind").HasValue("SPARK"),
+				check.That(data.ResourceName).Key("tier").HasValue("Standard"),
 				check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
 				check.That(data.ResourceName).Key("https_endpoint").Exists(),
 				check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
