@@ -1354,7 +1354,7 @@ def predict(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Gets model response for a single prompt.
@@ -1443,7 +1443,7 @@ async def predict_async(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Asynchronously gets model response for a single prompt.
@@ -1524,7 +1524,7 @@ def predict_streaming(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> Iterator[TextGenerationResponse]:
         """Gets a streaming model response for a single prompt.
@@ -1610,7 +1610,7 @@ async def predict_streaming_async(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> AsyncIterator[TextGenerationResponse]:
         """Asynchronously gets a streaming model response for a single prompt.
@@ -1702,7 +1702,7 @@ def _create_text_generation_prediction_request(
     logprobs: Optional[int] = None,
     presence_penalty: Optional[float] = None,
     frequency_penalty: Optional[float] = None,
-    logit_bias: Optional[Dict[int, int]] = None,
+    logit_bias: Optional[Dict[str, float]] = None,
     seed: Optional[int] = None,
 ) -> "_PredictionRequest":
     """Prepares the text generation request for a single prompt.