From e95560822bfca819bdfbc4ab4f10a7ca42010191 Mon Sep 17 00:00:00 2001
From: IvanLH
Date: Sat, 29 Mar 2025 06:31:53 +0000
Subject: [PATCH 1/3] Made enabling Google Search dependent on Assist
 availability

---
 .../config_flow.py      |  36 +++-
 .../test_config_flow.py | 162 +++++++++++++++---
 2 files changed, 168 insertions(+), 30 deletions(-)

diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py
index b7753c21bf925d..df685ff4639f6c 100644
--- a/homeassistant/components/google_generative_ai_conversation/config_flow.py
+++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py
@@ -172,6 +172,7 @@ def __init__(self, config_entry: ConfigEntry) -> None:
         self.last_rendered_recommended = config_entry.options.get(
             CONF_RECOMMENDED, False
         )
+        self.last_rendered_assist = config_entry.options.get(CONF_LLM_HASS_API, "none")
         self._genai_client = config_entry.runtime_data
 
     async def async_step_init(
@@ -181,13 +182,20 @@ async def async_step_init(
     ) -> ConfigFlowResult:
         """Manage the options."""
         options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
 
         if user_input is not None:
-            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
+            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended and (
+                user_input[CONF_LLM_HASS_API] == self.last_rendered_assist
+                or user_input[CONF_LLM_HASS_API] != "none"
+            ):
                 if user_input[CONF_LLM_HASS_API] == "none":
                     user_input.pop(CONF_LLM_HASS_API)
+                else:
+                    user_input.pop(CONF_USE_GOOGLE_SEARCH_TOOL, None)
                 return self.async_create_entry(title="", data=user_input)
 
             # Re-render the options again, now with the recommended options shown/hidden
+            # and with the Google Search option shown/hidden
             self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
+            self.last_rendered_assist = user_input[CONF_LLM_HASS_API]
 
             options = {
                 CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
@@ -297,6 +305,24 @@ async def google_generative_ai_config_option_schema(
         ): SelectSelector(
             SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
         ),
+    }
+    )
+
+    if options.get(CONF_LLM_HASS_API, "none") == "none":
+        schema.update(
+            {
+                vol.Optional(
+                    CONF_USE_GOOGLE_SEARCH_TOOL,
+                    description={
+                        "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
+                    },
+                    default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
+                ): bool,
+            }
+        )
+
+    schema.update(
+        {
             vol.Optional(
                 CONF_TEMPERATURE,
                 description={"suggested_value": options.get(CONF_TEMPERATURE)},
@@ -343,13 +369,7 @@ async def google_generative_ai_config_option_schema(
                 },
                 default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
             ): harm_block_thresholds_selector,
-            vol.Optional(
-                CONF_USE_GOOGLE_SEARCH_TOOL,
-                description={
-                    "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
-                },
-                default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
-            ): bool,
         }
     )
+
     return schema
diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py
index f7635c0b45e709..6d6d14f4855020 100644
--- a/tests/components/google_generative_ai_conversation/test_config_flow.py
+++ b/tests/components/google_generative_ai_conversation/test_config_flow.py
@@ -39,9 +39,8 @@
 from tests.common import MockConfigEntry
 
 
-@pytest.fixture
-def mock_models():
-    """Mock the model list API."""
+def get_models_pager():
+    """Return a generator that yields the models."""
     model_20_flash = Mock(
         display_name="Gemini 2.0 Flash",
         supported_actions=["generateContent"],
@@ -72,11 +71,7 @@ async def models_pager():
         yield model_15_pro
         yield model_10_pro
 
-    with patch(
-        "google.genai.models.AsyncModels.list",
-        return_value=models_pager(),
-    ):
-        yield
+    return models_pager()
 
 
 async def test_form(hass: HomeAssistant) -> None:
@@ -119,6 +114,16 @@ async def test_form(hass: HomeAssistant) -> None:
     assert len(mock_setup_entry.mock_calls) == 1
 
 
+def will_options_be_rendered_again(current_options, new_options) -> bool:
+    """Determine if options will be rendered again."""
+    return current_options.get(CONF_RECOMMENDED) != new_options.get(
+        CONF_RECOMMENDED
+    ) or (
+        current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
+        and new_options.get(CONF_LLM_HASS_API, "none") == "none"
+    )
+
+
 @pytest.mark.parametrize(
     ("current_options", "new_options", "expected_options"),
     [
@@ -157,6 +162,7 @@ async def test_form(hass: HomeAssistant) -> None:
                 CONF_TOP_P: RECOMMENDED_TOP_P,
                 CONF_TOP_K: RECOMMENDED_TOP_K,
                 CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
             },
             {
                 CONF_RECOMMENDED: True,
@@ -169,13 +175,102 @@ async def test_form(hass: HomeAssistant) -> None:
                 CONF_PROMPT: "",
             },
         ),
+        (
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
+            },
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_LLM_HASS_API: "assist",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
+            },
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_LLM_HASS_API: "assist",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+            },
+        ),
+        (
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_LLM_HASS_API: "assist",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+            },
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_LLM_HASS_API: "none",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+            },
+            {
+                CONF_RECOMMENDED: False,
+                CONF_PROMPT: "Speak like a pirate",
+                CONF_TEMPERATURE: 0.3,
+                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
+                CONF_TOP_P: RECOMMENDED_TOP_P,
+                CONF_TOP_K: RECOMMENDED_TOP_K,
+                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
+                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
+            },
+        ),
     ],
 )
 @pytest.mark.usefixtures("mock_init_component")
 async def test_options_switching(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
-    mock_models,
     current_options,
     new_options,
     expected_options,
@@ -186,21 +281,44 @@ async def test_options_switching(
         mock_config_entry, options=current_options
     )
     await hass.async_block_till_done()
-    options_flow = await hass.config_entries.options.async_init(
-        mock_config_entry.entry_id
-    )
-    if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED):
-        options_flow = await hass.config_entries.options.async_configure(
+    with patch(
+        "google.genai.models.AsyncModels.list",
+        return_value=get_models_pager(),
+    ):
+        options_flow = await hass.config_entries.options.async_init(
+            mock_config_entry.entry_id
+        )
+    if will_options_be_rendered_again(current_options, new_options):
+        retry_options = {
+            **current_options,
+            CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
+        }
+
+        if (
+            current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
+            and new_options.get(CONF_LLM_HASS_API) is not None
+        ):
+            retry_options = {
+                **retry_options,
+                CONF_LLM_HASS_API: new_options[CONF_LLM_HASS_API],
+            }
+
+        with patch(
+            "google.genai.models.AsyncModels.list",
+            return_value=get_models_pager(),
+        ):
+            options_flow = await hass.config_entries.options.async_configure(
+                options_flow["flow_id"],
+                retry_options,
+            )
+    with patch(
+        "google.genai.models.AsyncModels.list",
+        return_value=get_models_pager(),
+    ):
+        options = await hass.config_entries.options.async_configure(
             options_flow["flow_id"],
-            {
-                **current_options,
-                CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
-            },
+            new_options,
         )
-    options = await hass.config_entries.options.async_configure(
-        options_flow["flow_id"],
-        new_options,
-    )
     await hass.async_block_till_done()
     assert options["type"] is FlowResultType.CREATE_ENTRY
     assert options["data"] == expected_options

From 4a65f3a41fdd18d97b26d521ea0ebf70078c3022 Mon Sep 17 00:00:00 2001
From: IvanLH
Date: Thu, 3 Apr 2025 19:16:53 +0000
Subject: [PATCH 2/3] Show error instead of rendering again

---
 .../config_flow.py      | 54 +++++++------------
 .../strings.json        |  3 ++
 .../test_config_flow.py | 29 ++++++----
 3 files changed, 40 insertions(+), 46 deletions(-)

diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py
index df685ff4639f6c..ac6cb696a7dfe6 100644
--- a/homeassistant/components/google_generative_ai_conversation/config_flow.py
+++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py
@@ -172,7 +172,6 @@ def __init__(self, config_entry: ConfigEntry) -> None:
         self.last_rendered_recommended = config_entry.options.get(
             CONF_RECOMMENDED, False
         )
-        self.last_rendered_assist = config_entry.options.get(CONF_LLM_HASS_API, "none")
         self._genai_client = config_entry.runtime_data
 
     async def async_step_init(
@@ -180,35 +179,30 @@ async def async_step_init(
     ) -> ConfigFlowResult:
         """Manage the options."""
         options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
+        errors: dict[str, str] = {}
 
         if user_input is not None:
-            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended and (
-                user_input[CONF_LLM_HASS_API] == self.last_rendered_assist
-                or user_input[CONF_LLM_HASS_API] != "none"
-            ):
+            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
                 if user_input[CONF_LLM_HASS_API] == "none":
                     user_input.pop(CONF_LLM_HASS_API)
-                else:
-                    user_input.pop(CONF_USE_GOOGLE_SEARCH_TOOL, None)
-                return self.async_create_entry(title="", data=user_input)
+                if not (
+                    user_input.get(CONF_LLM_HASS_API, "none") != "none"
+                    and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
+                ):
+                    # Don't allow saving options that enable the Google Search tool with an Assist API
+                    return self.async_create_entry(title="", data=user_input)
+                errors[CONF_USE_GOOGLE_SEARCH_TOOL] = "invalid_google_search_option"
 
             # Re-render the options again, now with the recommended options shown/hidden
-            # and with the Google Search option shown/hidden
             self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
-            self.last_rendered_assist = user_input[CONF_LLM_HASS_API]
 
-            options = {
-                CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
-                CONF_PROMPT: user_input[CONF_PROMPT],
-                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
-            }
+            options = user_input
 
         schema = await google_generative_ai_config_option_schema(
             self.hass, options, self._genai_client
         )
         return self.async_show_form(
-            step_id="init",
-            data_schema=vol.Schema(schema),
+            step_id="init", data_schema=vol.Schema(schema), errors=errors
         )
@@ -305,24 +299,6 @@ async def google_generative_ai_config_option_schema(
         ): SelectSelector(
             SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
         ),
-    }
-    )
-
-    if options.get(CONF_LLM_HASS_API, "none") == "none":
-        schema.update(
-            {
-                vol.Optional(
-                    CONF_USE_GOOGLE_SEARCH_TOOL,
-                    description={
-                        "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
-                    },
-                    default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
-                ): bool,
-            }
-        )
-
-    schema.update(
-        {
             vol.Optional(
                 CONF_TEMPERATURE,
                 description={"suggested_value": options.get(CONF_TEMPERATURE)},
@@ -369,7 +345,13 @@ async def google_generative_ai_config_option_schema(
                 },
                 default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
             ): harm_block_thresholds_selector,
+            vol.Optional(
+                CONF_USE_GOOGLE_SEARCH_TOOL,
+                description={
+                    "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
+                },
+                default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
+            ): bool,
         }
     )
-
     return schema
diff --git a/homeassistant/components/google_generative_ai_conversation/strings.json b/homeassistant/components/google_generative_ai_conversation/strings.json
index b814f89469a908..cd7af711d3a3e0 100644
--- a/homeassistant/components/google_generative_ai_conversation/strings.json
+++ b/homeassistant/components/google_generative_ai_conversation/strings.json
@@ -43,6 +43,9 @@
           "prompt": "Instruct how the LLM should respond. This can be a template."
         }
       }
+    },
+    "error": {
+      "invalid_google_search_option": "Google Search cannot be enabled alongside any Assist capability; it can only be used when Assist is set to \"No control\"."
     }
   },
   "services": {
diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py
index 6d6d14f4855020..062ce27f70f08c 100644
--- a/tests/components/google_generative_ai_conversation/test_config_flow.py
+++ b/tests/components/google_generative_ai_conversation/test_config_flow.py
@@ -125,7 +125,7 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
 
 
 @pytest.mark.parametrize(
-    ("current_options", "new_options", "expected_options"),
+    ("current_options", "new_options", "expected_options", "errors"),
     [
         (
@@ -152,6 +152,7 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
             },
+            None,
         ),
         (
@@ -174,6 +175,7 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
                 CONF_LLM_HASS_API: "assist",
                 CONF_PROMPT: "",
             },
+            None,
         ),
         (
@@ -188,12 +190,11 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
                 CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
-                CONF_USE_GOOGLE_SEARCH_TOOL: True,
+                CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
             },
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
-                CONF_LLM_HASS_API: "assist",
                 CONF_TEMPERATURE: 0.3,
                 CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                 CONF_TOP_P: RECOMMENDED_TOP_P,
@@ -208,7 +209,6 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
-                CONF_LLM_HASS_API: "assist",
                 CONF_TEMPERATURE: 0.3,
                 CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                 CONF_TOP_P: RECOMMENDED_TOP_P,
@@ -218,13 +218,14 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
                 CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
             },
+            None,
         ),
         (
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
-                CONF_LLM_HASS_API: "assist",
                 CONF_TEMPERATURE: 0.3,
                 CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                 CONF_TOP_P: RECOMMENDED_TOP_P,
@@ -234,14 +235,14 @@ def will_options_be_rendered_again(current_options, new_options) -> bool:
                 CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
             },
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
                 CONF_LLM_HASS_API: "assist",
                 CONF_TEMPERATURE: 0.3,
                 CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                 CONF_TOP_P: RECOMMENDED_TOP_P,
                 CONF_TOP_K: RECOMMENDED_TOP_K,
                 CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                 CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
+                CONF_USE_GOOGLE_SEARCH_TOOL: True,
             },
             {
                 CONF_RECOMMENDED: False,
                 CONF_PROMPT: "Speak like a pirate",
                 CONF_LLM_HASS_API: "assist",
                 CONF_TEMPERATURE: 0.3,
                 CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                 CONF_TOP_P: RECOMMENDED_TOP_P,
                 CONF_TOP_K: RECOMMENDED_TOP_K,
                 CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                 CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                 CONF_USE_GOOGLE_SEARCH_TOOL: True,
             },
+            {CONF_USE_GOOGLE_SEARCH_TOOL: "invalid_google_search_option"},
         ),
     ],
 )
 @pytest.mark.usefixtures("mock_init_component")
 async def test_options_switching(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
     current_options,
     new_options,
     expected_options,
+    errors,
 ) -> None:
     """Test the options form."""
     with patch("google.genai.models.AsyncModels.get"):
@@ -320,8 +324,13 @@ async def test_options_switching(
             new_options,
         )
     await hass.async_block_till_done()
-    assert options["type"] is FlowResultType.CREATE_ENTRY
-    assert options["data"] == expected_options
+    if errors is None:
+        assert options["type"] is FlowResultType.CREATE_ENTRY
+        assert options["data"] == expected_options
+
+    else:
+        assert options["type"] is FlowResultType.FORM
+        assert options.get("errors", None) == errors
 
 
 @pytest.mark.parametrize(

From fc9e537260bdff9faf23e62e7bc8bc5e011b7f71 Mon Sep 17 00:00:00 2001
From: IvanLH
Date: Thu, 3 Apr 2025 19:18:55 +0000
Subject: [PATCH 3/3] Clean up test code

---
 .../test_config_flow.py | 17 +----------------
 1 file changed, 1 insertion(+), 16 deletions(-)

diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py
index 062ce27f70f08c..8fda02b335d0d9 100644
--- a/tests/components/google_generative_ai_conversation/test_config_flow.py
+++ b/tests/components/google_generative_ai_conversation/test_config_flow.py
@@ -116,12 +116,7 @@ async def test_form(hass: HomeAssistant) -> None:
 
 def will_options_be_rendered_again(current_options, new_options) -> bool:
     """Determine if options will be rendered again."""
-    return current_options.get(CONF_RECOMMENDED) != new_options.get(
-        CONF_RECOMMENDED
-    ) or (
-        current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
-        and new_options.get(CONF_LLM_HASS_API, "none") == "none"
-    )
+    return current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED)
 
 
 @pytest.mark.parametrize(
@@ -297,16 +292,6 @@ async def test_options_switching(
             **current_options,
             CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
         }
-
-        if (
-            current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
-            and new_options.get(CONF_LLM_HASS_API) is not None
-        ):
-            retry_options = {
-                **retry_options,
-                CONF_LLM_HASS_API: new_options[CONF_LLM_HASS_API],
-            }
-
         with patch(
             "google.genai.models.AsyncModels.list",
             return_value=get_models_pager(),