
Commit a75e81c

vertex-sdk-bot authored and copybara-github committed

feat: enable inline context in grounding to TextGenerationModel predict.

PiperOrigin-RevId: 597296033

1 parent ad8d9c1 commit a75e81c

File tree

1 file changed: +52 -6 lines

vertexai/language_models/_language_models.py (+52 -6)
@@ -749,6 +749,27 @@ def _to_grounding_source_dict(self) -> Dict[str, Any]:
         }


+@dataclasses.dataclass
+class InlineContext(_GroundingSourceBase):
+    """InlineContext represents a grounding source using provided inline context.
+    Attributes:
+        inline_context: The content used as inline context.
+    """
+
+    inline_context: str
+    _type: str = dataclasses.field(default="INLINE", init=False, repr=False)
+
+    def _to_grounding_source_dict(self) -> Dict[str, Any]:
+        return {
+            "sources": [
+                {
+                    "type": self._type,
+                }
+            ],
+            "inlineContext": self.inline_context,
+        }
+
+
 @dataclasses.dataclass
 class VertexAISearch(_GroundingSourceBase):
     """VertexAISearchDatastore represents a grounding source using Vertex AI Search datastore
@@ -792,6 +813,7 @@ class GroundingSource:

     WebSearch = WebSearch
     VertexAISearch = VertexAISearch
+    InlineContext = InlineContext


 @dataclasses.dataclass
@@ -976,7 +998,11 @@ def predict(
         stop_sequences: Optional[List[str]] = None,
         candidate_count: Optional[int] = None,
         grounding_source: Optional[
-            Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+            Union[
+                GroundingSource.WebSearch,
+                GroundingSource.VertexAISearch,
+                GroundingSource.InlineContext,
+            ]
         ] = None,
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
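With the widened predict signature, callers can pass the new source directly. A hedged usage sketch: it assumes vertexai is initialized for a project and that GroundingSource is re-exported from the public vertexai.preview.language_models namespace, as in other grounding examples for this SDK generation (only the private module is touched in this diff). The project id, location, model name "text-bison", prompt, and context text are placeholders.

import vertexai
from vertexai.preview.language_models import GroundingSource, TextGenerationModel

# Placeholder project/location; replace with real values.
vertexai.init(project="my-project", location="us-central1")

model = TextGenerationModel.from_pretrained("text-bison")
response = model.predict(
    "When and where was Acme Corp founded?",
    grounding_source=GroundingSource.InlineContext(
        inline_context="Acme Corp was founded in 1999 in Zurich."
    ),
)
print(response.text)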
@@ -1053,7 +1079,11 @@ async def predict_async(
         stop_sequences: Optional[List[str]] = None,
         candidate_count: Optional[int] = None,
         grounding_source: Optional[
-            Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+            Union[
+                GroundingSource.WebSearch,
+                GroundingSource.VertexAISearch,
+                GroundingSource.InlineContext,
+            ]
         ] = None,
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
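The asynchronous path gets the same widened type hint. A minimal sketch of awaiting the grounded call, under the same placeholder assumptions as the synchronous example:

import asyncio

import vertexai
from vertexai.preview.language_models import GroundingSource, TextGenerationModel


async def grounded_predict() -> None:
    # Same placeholder model, prompt, and context as the synchronous sketch.
    model = TextGenerationModel.from_pretrained("text-bison")
    response = await model.predict_async(
        "When and where was Acme Corp founded?",
        grounding_source=GroundingSource.InlineContext(
            inline_context="Acme Corp was founded in 1999 in Zurich."
        ),
    )
    print(response.text)


vertexai.init(project="my-project", location="us-central1")  # placeholder values
asyncio.run(grounded_predict())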
@@ -1284,7 +1314,11 @@ def _create_text_generation_prediction_request(
     stop_sequences: Optional[List[str]] = None,
     candidate_count: Optional[int] = None,
     grounding_source: Optional[
-        Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+        Union[
+            GroundingSource.WebSearch,
+            GroundingSource.VertexAISearch,
+            GroundingSource.InlineContext,
+        ]
     ] = None,
     logprobs: Optional[int] = None,
    presence_penalty: Optional[float] = None,
@@ -2136,7 +2170,11 @@ def _prepare_request(
         stop_sequences: Optional[List[str]] = None,
         candidate_count: Optional[int] = None,
         grounding_source: Optional[
-            Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+            Union[
+                GroundingSource.WebSearch,
+                GroundingSource.VertexAISearch,
+                GroundingSource.InlineContext,
+            ]
         ] = None,
     ) -> _PredictionRequest:
         """Prepares a request for the language model.
@@ -2289,7 +2327,11 @@ def send_message(
         stop_sequences: Optional[List[str]] = None,
         candidate_count: Optional[int] = None,
         grounding_source: Optional[
-            Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+            Union[
+                GroundingSource.WebSearch,
+                GroundingSource.VertexAISearch,
+                GroundingSource.InlineContext,
+            ]
         ] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Sends message to the language model and gets a response.
@@ -2352,7 +2394,11 @@ async def send_message_async(
         stop_sequences: Optional[List[str]] = None,
         candidate_count: Optional[int] = None,
         grounding_source: Optional[
-            Union[GroundingSource.WebSearch, GroundingSource.VertexAISearch]
+            Union[
+                GroundingSource.WebSearch,
+                GroundingSource.VertexAISearch,
+                GroundingSource.InlineContext,
+            ]
         ] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Asynchronously sends message to the language model and gets a response.
