Skip to content

feat: support json_schema for ollama models #11449

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged 1 commit on Dec 8, 2024
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 9 additions & 7 deletions api/core/model_runtime/model_providers/ollama/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,9 +181,11 @@ def _generate(
# prepare the payload for a simple ping to the model
data = {"model": model, "stream": stream}

if "format" in model_parameters:
data["format"] = model_parameters["format"]
del model_parameters["format"]
if format_schema := model_parameters.pop("format", None):
try:
data["format"] = format_schema if format_schema == "json" else json.loads(format_schema)
except json.JSONDecodeError as e:
raise InvokeBadRequestError(f"Invalid format schema: {str(e)}")

if "keep_alive" in model_parameters:
data["keep_alive"] = model_parameters["keep_alive"]
Expand Down Expand Up @@ -733,12 +735,12 @@ def get_customizable_model_schema(self, model: str, credentials: dict) -> AIMode
ParameterRule(
name="format",
label=I18nObject(en_US="Format", zh_Hans="返回格式"),
type=ParameterType.STRING,
type=ParameterType.TEXT,
default="json",
help=I18nObject(
en_US="the format to return a response in. Currently the only accepted value is json.",
zh_Hans="返回响应的格式。目前唯一接受的值是json。",
en_US="the format to return a response in. Format can be `json` or a JSON schema.",
zh_Hans="返回响应的格式。目前接受的值是字符串`json`或JSON schema.",
),
options=["json"],
),
],
pricing=PriceConfig(
Expand Down
Loading