Commit 9d3d10a

Merge pull request #276 from inclusionAI/feature/api-server
Feature/api server
2 parents 7c5edce + e274087 commit 9d3d10a

4 files changed: +32 −16 lines changed


examples/gaia/cmd/agent_deploy/gaia_agent/.env.template

Lines changed: 18 additions & 2 deletions
@@ -1,13 +1,13 @@
 # LLM Model Config
-LLM_PROVIDER = {YOUR_CONFIG:ant/openai}
+# LLM_PROVIDER = {YOUR_CONFIG}
 LLM_MODEL_NAME = {YOUR_CONFIG}
 LLM_API_KEY = {YOUR_CONFIG}
 LLM_BASE_URL = {YOUR_CONFIG}
 LLM_TEMPERATURE = 0.0
 
 # ===============Path Configurations=================
 # GAIA_DATASET_PATH="/path/to/your/gaia-benchmark/GAIA/2023"
-# AWORLD_WORKSPACE="/path/to/your/workspace_dir"
+AWORLD_WORKSPACE="/tmp"
 
 # ===============MCP Server Configurations=================
 # [Google Search API](https://developers.google.com/custom-search/v1/introduction)
@@ -29,5 +29,21 @@ IMAGE_LLM_API_KEY={YOUR_CONFIG}
 IMAGE_LLM_BASE_URL=https://openrouter.ai/api/v1
 IMAGE_LLM_MODEL_NAME=anthropic/claude-3.7-sonnet
 
+# Video Server
+VIDEO_LLM_API_KEY={YOUR_CONFIG}
+VIDEO_LLM_BASE_URL=https://openrouter.ai/api/v1
+VIDEO_LLM_MODEL_NAME=gpt-4o
+VIDEO_LLM_TEMPERATURE=1.0
+
+# Code Server
+CODE_LLM_API_KEY={YOUR_CONFIG}
+CODE_LLM_BASE_URL=https://openrouter.ai/api/v1
+CODE_LLM_MODEL_NAME=anthropic/claude-sonnet-4
+
+# Think Server
+THINK_LLM_API_KEY={YOUR_CONFIG}
+THINK_LLM_BASE_URL=https://openrouter.ai/api/v1
+THINK_LLM_MODEL_NAME=deepseek/deepseek-r1-0528:free
+
 # [E2B Server](https://e2b.dev/docs/quickstart)
 E2B_API_KEY={YOUR_CONFIG}
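
For reference, the new VIDEO_/CODE_/THINK_ blocks mirror the existing IMAGE_ block: each MCP server gets its own API key, base URL, and model name. A minimal sketch of reading these variables after copying the template to .env (this assumes python-dotenv, which the diff itself does not confirm the project uses):

import os
from dotenv import load_dotenv  # assumption: python-dotenv; the project may load env vars differently

load_dotenv()  # copy .env entries into os.environ

video_cfg = {
    "api_key": os.getenv("VIDEO_LLM_API_KEY"),
    "base_url": os.getenv("VIDEO_LLM_BASE_URL", "https://openrouter.ai/api/v1"),
    "model": os.getenv("VIDEO_LLM_MODEL_NAME", "gpt-4o"),
    "temperature": float(os.getenv("VIDEO_LLM_TEMPERATURE", "1.0")),
}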

examples/gaia/mcp_collections/intelligence/code.py

Lines changed: 3 additions & 3 deletions
@@ -48,9 +48,9 @@ def __init__(self, arguments: ActionArguments) -> None:
         # Initialize code generation model configuration
         self._llm_config = AgentConfig(
             llm_provider="openai",
-            llm_model_name="anthropic/claude-sonnet-4",
-            llm_api_key=os.getenv("LLM_API_KEY"),
-            llm_base_url=os.getenv("LLM_BASE_URL"),
+            llm_model_name=os.getenv("CODE_LLM_MODEL_NAME", "anthropic/claude-sonnet-4"),
+            llm_api_key=os.getenv("CODE_LLM_API_KEY"),
+            llm_base_url=os.getenv("CODE_LLM_BASE_URL"),
         )
 
         self._color_log("Code Generation Service initialized", Color.green, "debug")

examples/gaia/mcp_collections/intelligence/think.py

Lines changed: 3 additions & 3 deletions
@@ -31,9 +31,9 @@ def __init__(self, arguments: ActionArguments) -> None:
         self._llm_config = AgentConfig(
             llm_provider="openai",
             # llm_model_name="google/gemini-2.5-flash-preview-05-20:thinking",
-            llm_model_name="deepseek/deepseek-r1-0528:free",
-            llm_api_key=os.getenv("LLM_API_KEY"),
-            llm_base_url=os.getenv("LLM_BASE_URL"),
+            llm_model_name=os.getenv("THINK_LLM_MODEL_NAME", "deepseek/deepseek-r1-0528:free"),
+            llm_api_key=os.getenv("THINK_LLM_API_KEY"),
+            llm_base_url=os.getenv("THINK_LLM_BASE_URL"),
         )
 
         self._color_log("Intelligence Reasoning Service initialized", Color.green, "debug")

examples/gaia/mcp_collections/media/video.py

Lines changed: 8 additions & 8 deletions
@@ -371,13 +371,13 @@ def _analyze_frame_chunk(self, chunk_data: tuple[int, list, str]) -> tuple[int,
             get_llm_model(
                 conf=AgentConfig(
                     llm_provider="openai",
-                    llm_model_name=os.getenv("LLM_MODEL_NAME", "gpt-4o"),
-                    llm_api_key=os.getenv("LLM_API_KEY"),
-                    llm_base_url=os.getenv("LLM_BASE_URL"),
+                    llm_model_name=os.getenv("VIDEO_LLM_MODEL_NAME"),
+                    llm_api_key=os.getenv("VIDEO_LLM_API_KEY"),
+                    llm_base_url=os.getenv("VIDEO_LLM_BASE_URL"),
                 )
             ),
             inputs,
-            temperature=float(os.getenv("LLM_TEMPERATURE", "1.0")),
+            temperature=float(os.getenv("VIDEO_LLM_TEMPERATURE", "1.0")),
         )
         analysis_result = response.content
         self._color_log(f"✅ Completed analysis for chunk {chunk_index + 1}", Color.green)
@@ -579,13 +579,13 @@ async def mcp_summarize_video(
                 get_llm_model(
                     conf=AgentConfig(
                         llm_provider="openai",
-                        llm_model_name=os.getenv("LLM_MODEL_NAME", "gpt-4o"),
-                        llm_api_key=os.getenv("LLM_API_KEY", "your_openai_api_key"),
-                        llm_base_url=os.getenv("LLM_BASE_URL", "your_openai_base_url"),
+                        llm_model_name=os.getenv("VIDEO_LLM_MODEL_NAME", "gpt-4o"),
+                        llm_api_key=os.getenv("VIDEO_LLM_API_KEY"),
+                        llm_base_url=os.getenv("VIDEO_LLM_BASE_URL"),
                     )
                 ),
                 inputs,
-                temperature=float(os.getenv("LLM_TEMPERATURE", "1.0")),
+                temperature=float(os.getenv("VIDEO_LLM_TEMPERATURE", "1.0")),
             )
             cur_summary = response.content
         except Exception as e:
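
One caveat with float(os.getenv("VIDEO_LLM_TEMPERATURE", "1.0")): the default only covers a missing variable, so a malformed value (e.g. "1,0") would still raise ValueError. A hypothetical helper, not part of this PR, that also guards against bad input:

import os

def _env_float(name: str, default: float) -> float:
    """Read an env var as a float, falling back to `default` if unset or malformed."""
    raw = os.getenv(name)
    if raw is None:
        return default
    try:
        return float(raw)
    except ValueError:
        return default

temperature = _env_float("VIDEO_LLM_TEMPERATURE", 1.0)  # hypothetical usage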
