Made Google Search enablement dependent on Assist availability #141712

Merged (5 commits, Apr 3, 2025)
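This PR gates the Google Search tool option on the Assist (LLM API) selection: the toggle is only offered when no LLM API is configured, because the google_search tool cannot be used together with Assist's function tools. A minimal sketch of the rule, using the constant from the diff below (the helper name itself is illustrative, not part of the PR):

```python
# Sketch of the gating rule this PR implements (helper name is hypothetical).
def show_search_toggle(options: dict) -> bool:
    """The Google Search toggle appears only when no LLM API is selected."""
    return options.get(CONF_LLM_HASS_API, "none") == "none"
```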
@@ -172,6 +172,7 @@ def __init__(self, config_entry: ConfigEntry) -> None:
        self.last_rendered_recommended = config_entry.options.get(
            CONF_RECOMMENDED, False
        )
        self.last_rendered_assist = config_entry.options.get(CONF_LLM_HASS_API, "none")
        self._genai_client = config_entry.runtime_data

    async def async_step_init(
@@ -181,13 +182,20 @@ async def async_step_init
        options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options

        if user_input is not None:
            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
            if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended and (
                user_input[CONF_LLM_HASS_API] == self.last_rendered_assist
                or user_input[CONF_LLM_HASS_API] != "none"
            ):
                if user_input[CONF_LLM_HASS_API] == "none":
                    user_input.pop(CONF_LLM_HASS_API)
                else:
                    user_input.pop(CONF_USE_GOOGLE_SEARCH_TOOL, None)
                return self.async_create_entry(title="", data=user_input)

            # Re-render the options form, now with the recommended options and the
            # Google Search option shown or hidden as appropriate
            self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
            self.last_rendered_assist = user_input[CONF_LLM_HASS_API]

            options = {
                CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
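The early-return condition above is easiest to read inverted: the form is re-rendered instead of saved exactly when the recommended toggle changed, or when the LLM API switched to "none" (the case that reveals the Google Search toggle). A sketch of the equivalent predicate, mirroring the will_options_be_rendered_again helper the tests define further down:

```python
# Equivalent re-render predicate (sketch; constant names follow the diff).
def needs_rerender(last_recommended: bool, last_assist: str, user_input: dict) -> bool:
    """True when the options form must be shown again instead of saving."""
    return user_input[CONF_RECOMMENDED] != last_recommended or (
        user_input[CONF_LLM_HASS_API] != last_assist
        and user_input[CONF_LLM_HASS_API] == "none"
    )
```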
@@ -297,6 +305,24 @@ async def google_generative_ai_config_option_schema(
            ): SelectSelector(
                SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
            ),
        }
    )

    if options.get(CONF_LLM_HASS_API, "none") == "none":
        schema.update(
            {
                vol.Optional(
                    CONF_USE_GOOGLE_SEARCH_TOOL,
                    description={
                        "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
                    },
                    default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
                ): bool,
            }
        )

    schema.update(
        {
            vol.Optional(
                CONF_TEMPERATURE,
                description={"suggested_value": options.get(CONF_TEMPERATURE)},
@@ -343,13 +369,7 @@ async def google_generative_ai_config_option_schema(
                },
                default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
            ): harm_block_thresholds_selector,
            vol.Optional(
                CONF_USE_GOOGLE_SEARCH_TOOL,
                description={
                    "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
                },
                default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
            ): bool,
        }
    )

    return schema
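The schema is now built in stages: the Google Search toggle is added right after the model picker, and only when no LLM API is selected; the unconditional copy that previously sat at the bottom of the schema is removed (the deleted block is still visible above). Because dicts preserve insertion order, the staged schema.update calls should also control where the toggle appears in the rendered form. A condensed sketch of the pattern, with the field set abbreviated and False standing in for RECOMMENDED_USE_GOOGLE_SEARCH_TOOL:

```python
# Condensed sketch of the staged schema build; only the gating is the point.
schema: dict = {}
schema.update({vol.Optional(CONF_CHAT_MODEL): str})  # model picker (abbreviated)
if options.get(CONF_LLM_HASS_API, "none") == "none":
    # Offered only without an LLM API; False stands in for the recommended default.
    schema.update({vol.Optional(CONF_USE_GOOGLE_SEARCH_TOOL, default=False): bool})
schema.update({vol.Optional(CONF_TEMPERATURE): float})  # remaining advanced fields
```

The remaining hunks below are from the integration's config-flow tests.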
@@ -39,9 +39,8 @@
from tests.common import MockConfigEntry


@pytest.fixture
def mock_models():
    """Mock the model list API."""
def get_models_pager():
    """Return a generator that yields the models."""
    model_20_flash = Mock(
        display_name="Gemini 2.0 Flash",
        supported_actions=["generateContent"],
@@ -72,11 +71,7 @@ async def models_pager():
        yield model_15_pro
        yield model_10_pro

    with patch(
        "google.genai.models.AsyncModels.list",
        return_value=models_pager(),
    ):
        yield
    return models_pager()
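Turning the mock_models fixture into a plain get_models_pager() function lets the test patch the model-list call around each individual flow step: every async_init or async_configure renders the form again and consumes the pager, and an exhausted async generator would yield no models on the next call. Typical usage, as in the updated test below:

```python
# A fresh pager per step; an exhausted async generator yields nothing.
with patch(
    "google.genai.models.AsyncModels.list",
    return_value=get_models_pager(),
):
    options_flow = await hass.config_entries.options.async_init(
        mock_config_entry.entry_id
    )
```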


async def test_form(hass: HomeAssistant) -> None:
@@ -119,6 +114,16 @@ async def test_form(hass: HomeAssistant) -> None:
    assert len(mock_setup_entry.mock_calls) == 1


def will_options_be_rendered_again(current_options, new_options) -> bool:
    """Determine if options will be rendered again."""
    return current_options.get(CONF_RECOMMENDED) != new_options.get(
        CONF_RECOMMENDED
    ) or (
        current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
        and new_options.get(CONF_LLM_HASS_API, "none") == "none"
    )
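For example, switching the LLM API from "assist" to "none" reveals the Google Search toggle, so the form renders again and the test must submit twice; switching from unset to "assist" saves in one step:

```python
# Only the switch *to* "none" forces a second render.
assert will_options_be_rendered_again(
    {CONF_RECOMMENDED: False, CONF_LLM_HASS_API: "assist"},
    {CONF_RECOMMENDED: False, CONF_LLM_HASS_API: "none"},
)
assert not will_options_be_rendered_again(
    {CONF_RECOMMENDED: False},
    {CONF_RECOMMENDED: False, CONF_LLM_HASS_API: "assist"},
)
```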


@pytest.mark.parametrize(
    ("current_options", "new_options", "expected_options"),
    [
@@ -157,6 +162,7 @@ async def test_form(hass: HomeAssistant) -> None:
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_USE_GOOGLE_SEARCH_TOOL: True,
            },
            {
                CONF_RECOMMENDED: True,
@@ -169,13 +175,102 @@
                CONF_PROMPT: "",
            },
        ),
        (
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_USE_GOOGLE_SEARCH_TOOL: True,
            },
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_LLM_HASS_API: "assist",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_USE_GOOGLE_SEARCH_TOOL: True,
            },
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_LLM_HASS_API: "assist",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
            },
        ),
        (
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_LLM_HASS_API: "assist",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
            },
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_LLM_HASS_API: "none",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
            },
            {
                CONF_RECOMMENDED: False,
                CONF_PROMPT: "Speak like a pirate",
                CONF_TEMPERATURE: 0.3,
                CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL,
                CONF_TOP_P: RECOMMENDED_TOP_P,
                CONF_TOP_K: RECOMMENDED_TOP_K,
                CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS,
                CONF_HARASSMENT_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD,
                CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
            },
        ),
    ],
)
@pytest.mark.usefixtures("mock_init_component")
async def test_options_switching(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_models,
    current_options,
    new_options,
    expected_options,
@@ -186,21 +281,44 @@ async def test_options_switching(
        mock_config_entry, options=current_options
    )
    await hass.async_block_till_done()
    options_flow = await hass.config_entries.options.async_init(
        mock_config_entry.entry_id
    )
    if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED):
        options_flow = await hass.config_entries.options.async_configure(
    with patch(
        "google.genai.models.AsyncModels.list",
        return_value=get_models_pager(),
    ):
        options_flow = await hass.config_entries.options.async_init(
            mock_config_entry.entry_id
        )
    if will_options_be_rendered_again(current_options, new_options):
        retry_options = {
            **current_options,
            CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
        }

        if (
            current_options.get(CONF_LLM_HASS_API) != new_options.get(CONF_LLM_HASS_API)
            and new_options.get(CONF_LLM_HASS_API) is not None
        ):
            retry_options = {
                **retry_options,
                CONF_LLM_HASS_API: new_options[CONF_LLM_HASS_API],
            }

        with patch(
            "google.genai.models.AsyncModels.list",
            return_value=get_models_pager(),
        ):
            options_flow = await hass.config_entries.options.async_configure(
                options_flow["flow_id"],
                retry_options,
            )
    with patch(
        "google.genai.models.AsyncModels.list",
        return_value=get_models_pager(),
    ):
        options = await hass.config_entries.options.async_configure(
            options_flow["flow_id"],
            {
                **current_options,
                CONF_RECOMMENDED: new_options[CONF_RECOMMENDED],
            },
            new_options,
        )
    options = await hass.config_entries.options.async_configure(
        options_flow["flow_id"],
        new_options,
    )
    await hass.async_block_till_done()
    assert options["type"] is FlowResultType.CREATE_ENTRY
    assert options["data"] == expected_options