Skip to content

Commit cf2bff5

Browse files
meken authored and Ark-kun committed
removing the echo parameter, this fix only includes seed
1 parent 6e23d68 commit cf2bff5

File tree

2 files changed

+0
-24
lines changed

2 files changed

+0
-24
lines changed

tests/unit/aiplatform/test_language_models.py

-2
Original file line numberDiff line numberDiff line change
@@ -1776,7 +1776,6 @@ def test_text_generation_ga(self):
17761776
frequency_penalty=1.0,
17771777
logit_bias={1: 100.0, 2: -100.0},
17781778
seed=42,
1779-
echo=True,
17801779
)
17811780

17821781
expected_errors = (100,)
@@ -1791,7 +1790,6 @@ def test_text_generation_ga(self):
17911790
assert prediction_parameters["frequencyPenalty"] == 1.0
17921791
assert prediction_parameters["logitBias"] == {1: 100.0, 2: -100.0}
17931792
assert prediction_parameters["seed"] == 42
1794-
assert prediction_parameters["echo"] is True
17951793
assert response.text == _TEST_TEXT_GENERATION_PREDICTION["content"]
17961794
assert response.errors == expected_errors
17971795

vertexai/language_models/_language_models.py

-22
Original file line numberDiff line numberDiff line change
@@ -1290,7 +1290,6 @@ def predict(
12901290
frequency_penalty: Optional[float] = None,
12911291
logit_bias: Optional[Dict[int, float]] = None,
12921292
seed: Optional[int] = None,
1293-
echo: Optional[bool] = None,
12941293
) -> "MultiCandidateTextGenerationResponse":
12951294
"""Gets model response for a single prompt.
12961295
@@ -1329,8 +1328,6 @@ def predict(
13291328
the same output with the same seed. If seed is not set, the seed used in decoder will not be
13301329
deterministic, thus the generated random noise will not be deterministic. If seed is set, the
13311330
generated random noise will be deterministic.
1332-
echo:
1333-
If true, the prompt is echoed in the generated text.
13341331
13351332
Returns:
13361333
A `MultiCandidateTextGenerationResponse` object that contains the text produced by the model.
@@ -1349,7 +1346,6 @@ def predict(
13491346
frequency_penalty=frequency_penalty,
13501347
logit_bias=logit_bias,
13511348
seed=seed,
1352-
echo=echo,
13531349
)
13541350

13551351
prediction_response = self._endpoint.predict(
@@ -1383,7 +1379,6 @@ async def predict_async(
13831379
frequency_penalty: Optional[float] = None,
13841380
logit_bias: Optional[Dict[int, float]] = None,
13851381
seed: Optional[int] = None,
1386-
echo: Optional[bool] = None,
13871382
) -> "MultiCandidateTextGenerationResponse":
13881383
"""Asynchronously gets model response for a single prompt.
13891384
@@ -1422,8 +1417,6 @@ async def predict_async(
14221417
the same output with the same seed. If seed is not set, the seed used in decoder will not be
14231418
deterministic, thus the generated random noise will not be deterministic. If seed is set, the
14241419
generated random noise will be deterministic.
1425-
echo:
1426-
If true, the prompt is echoed in the generated text.
14271420
14281421
Returns:
14291422
A `MultiCandidateTextGenerationResponse` object that contains the text produced by the model.
@@ -1442,7 +1435,6 @@ async def predict_async(
14421435
frequency_penalty=frequency_penalty,
14431436
logit_bias=logit_bias,
14441437
seed=seed,
1445-
echo=echo,
14461438
)
14471439

14481440
prediction_response = await self._endpoint.predict_async(
@@ -1468,7 +1460,6 @@ def predict_streaming(
14681460
frequency_penalty: Optional[float] = None,
14691461
logit_bias: Optional[Dict[int, float]] = None,
14701462
seed: Optional[int] = None,
1471-
echo: Optional[bool] = None,
14721463
) -> Iterator[TextGenerationResponse]:
14731464
"""Gets a streaming model response for a single prompt.
14741465
@@ -1507,8 +1498,6 @@ def predict_streaming(
15071498
the same output with the same seed. If seed is not set, the seed used in decoder will not be
15081499
deterministic, thus the generated random noise will not be deterministic. If seed is set, the
15091500
generated random noise will be deterministic.
1510-
echo:
1511-
If true, the prompt is echoed in the generated text.
15121501
15131502
Yields:
15141503
A stream of `TextGenerationResponse` objects that contain partial
@@ -1526,7 +1515,6 @@ def predict_streaming(
15261515
frequency_penalty=frequency_penalty,
15271516
logit_bias=logit_bias,
15281517
seed=seed,
1529-
echo=echo,
15301518
)
15311519

15321520
prediction_service_client = self._endpoint._prediction_client
@@ -1558,7 +1546,6 @@ async def predict_streaming_async(
15581546
frequency_penalty: Optional[float] = None,
15591547
logit_bias: Optional[Dict[int, float]] = None,
15601548
seed: Optional[int] = None,
1561-
echo: Optional[bool] = None,
15621549
) -> AsyncIterator[TextGenerationResponse]:
15631550
"""Asynchronously gets a streaming model response for a single prompt.
15641551
@@ -1597,8 +1584,6 @@ async def predict_streaming_async(
15971584
the same output with the same seed. If seed is not set, the seed used in decoder will not be
15981585
deterministic, thus the generated random noise will not be deterministic. If seed is set, the
15991586
generated random noise will be deterministic.
1600-
echo:
1601-
If true, the prompt is echoed in the generated text.
16021587
16031588
Yields:
16041589
A stream of `TextGenerationResponse` objects that contain partial
@@ -1616,7 +1601,6 @@ async def predict_streaming_async(
16161601
frequency_penalty=frequency_penalty,
16171602
logit_bias=logit_bias,
16181603
seed=seed,
1619-
echo=echo,
16201604
)
16211605

16221606
prediction_service_async_client = self._endpoint._prediction_async_client
@@ -1654,7 +1638,6 @@ def _create_text_generation_prediction_request(
16541638
frequency_penalty: Optional[float] = None,
16551639
logit_bias: Optional[Dict[int, int]] = None,
16561640
seed: Optional[int] = None,
1657-
echo: Optional[bool] = None,
16581641
) -> "_PredictionRequest":
16591642
"""Prepares the text generation request for a single prompt.
16601643
@@ -1693,8 +1676,6 @@ def _create_text_generation_prediction_request(
16931676
the same output with the same seed. If seed is not set, the seed used in decoder will not be
16941677
deterministic, thus the generated random noise will not be deterministic. If seed is set, the
16951678
generated random noise will be deterministic.
1696-
echo:
1697-
If true, the prompt is echoed in the generated text.
16981679
16991680
Returns:
17001681
A `_PredictionRequest` object that contains prediction instance and parameters.
@@ -1744,9 +1725,6 @@ def _create_text_generation_prediction_request(
17441725
if seed is not None:
17451726
prediction_parameters["seed"] = seed
17461727

1747-
if echo is not None:
1748-
prediction_parameters["echo"] = echo
1749-
17501728
return _PredictionRequest(
17511729
instance=instance,
17521730
parameters=prediction_parameters,

0 commit comments

Comments (0)