
Commit 6566529

Ark-kun authored and copybara-github committed
feat: LLM - Improved the handling of temperature and top_p in streaming
The `temperature` and `top_p` parameters are typed as `float`, but callers sometimes pass integer values. The streaming API is sensitive to the `int` vs. `float` distinction and throws an error when it receives an `int`. This CL mitigates that by converting integer values to floats before the request is sent.

PiperOrigin-RevId: 562738676
1 parent 50c1591 commit 6566529
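
To make the fix concrete, here is a minimal, self-contained sketch of the coercion pattern this commit applies. The `_coerce_to_float` helper is hypothetical; the actual patch inlines the `isinstance` check at each call site:

from typing import Optional

def _coerce_to_float(value: Optional[float]) -> Optional[float]:
    # Hypothetical helper mirroring the commit's pattern: the streaming
    # endpoint rejects ints, so convert them to floats and pass None
    # and floats through unchanged.
    if isinstance(value, int):
        return float(value)
    return value

prediction_parameters = {}
temperature = 1  # an int a caller might pass instead of 1.0
if temperature is not None:
    prediction_parameters["temperature"] = _coerce_to_float(temperature)
assert isinstance(prediction_parameters["temperature"], float)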

File tree

1 file changed: +10 −0 lines changed


vertexai/language_models/_language_models.py

@@ -759,9 +759,13 @@ def predict_streaming(
             prediction_parameters["maxDecodeSteps"] = max_output_tokens

         if temperature is not None:
+            if isinstance(temperature, int):
+                temperature = float(temperature)
             prediction_parameters["temperature"] = temperature

         if top_p:
+            if isinstance(top_p, int):
+                top_p = float(top_p)
             prediction_parameters["topP"] = top_p

         if top_k:
@@ -1389,10 +1393,14 @@ def _prepare_request(
         if temperature is None:
             temperature = self._temperature
         if temperature is not None:
+            if isinstance(temperature, int):
+                temperature = float(temperature)
             prediction_parameters["temperature"] = temperature

         top_p = top_p or self._top_p
         if top_p:
+            if isinstance(top_p, int):
+                top_p = float(top_p)
             prediction_parameters["topP"] = top_p

         top_k = top_k or self._top_k
@@ -1749,6 +1757,8 @@ def _create_prediction_request(
         prediction_parameters = {}

         if temperature is not None:
+            if isinstance(temperature, int):
+                temperature = float(temperature)
             prediction_parameters["temperature"] = temperature

         if max_output_tokens:
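
For context, here is a caller-side sketch of the failure mode this commit fixes. The call shape follows the public vertexai SDK that this file backs, but the project, location, model name, and prompt are illustrative assumptions:

import vertexai
from vertexai.language_models import TextGenerationModel

vertexai.init(project="my-project", location="us-central1")  # illustrative values
model = TextGenerationModel.from_pretrained("text-bison")  # example model name

# Passing ints here (temperature=1, top_p=1) previously made the streaming
# endpoint reject the request; with this commit they are coerced to floats.
for response in model.predict_streaming(
    "Write a haiku about type coercion.",
    temperature=1,
    top_p=1,
):
    print(response.text, end="")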
