Compare commits

...

4 Commits

Author SHA1 Message Date
Rumio
dc92d7abc1
Merge 78c59f4f28 into e5b2a872d3 2025-12-12 09:12:39 +00:00
pre-commit-ci[bot]
78c59f4f28 🚨 auto fix by pre-commit hooks 2025-12-12 09:12:35 +00:00
webjoin111
de5c77407d feat(search): 为搜索功能默认启用 Gemini Google Search 工具 2025-12-12 17:12:18 +08:00
webjoin111
30101cf1f0 feat(llm): 支持结构化生成函数接收 UniMessage 2025-12-12 17:12:18 +08:00
3 changed files with 6 additions and 5 deletions

View File

@@ -119,8 +119,7 @@ class GeminiAdapter(BaseAdapter):
system_instruction_parts = [{"text": msg.content}]
elif isinstance(msg.content, list):
system_instruction_parts = [
- await converter.convert_part(part)
- for part in msg.content
+ await converter.convert_part(part) for part in msg.content
]
continue

View File

@@ -30,6 +30,7 @@ from .types import (
ToolChoice,
)
from .types.exceptions import get_user_friendly_error_message
from .types.models import GeminiGoogleSearch
from .utils import create_multimodal_message
T = TypeVar("T", bound=BaseModel)
@@ -182,7 +183,7 @@ async def embed_documents(
async def generate_structured(
- message: str | LLMMessage | list[LLMContentPart],
+ message: str | UniMessage | LLMMessage | list[LLMContentPart],
response_model: type[T],
*,
model: ModelName = None,
@@ -302,7 +303,6 @@ async def _generate_image_from_message(
messages = await normalize_to_llm_messages(message)
async with await get_model_instance(model) as model_instance:
response = await model_instance.generate_response(messages, config=config)
if not response.images:
@@ -403,4 +403,5 @@ async def search(
model=model,
instruction=instruction,
config=final_config,
tools=[GeminiGoogleSearch()],
)

View File

@@ -445,11 +445,12 @@ class AI:
instruction=instruction,
template_vars=template_vars,
config=search_config,
tools=[GeminiGoogleSearch()],
)
async def generate_structured(
self,
- message: str | LLMMessage | list[LLMContentPart] | None,
+ message: str | UniMessage | LLMMessage | list[LLMContentPart] | None,
response_model: type[T],
*,
model: ModelName = None,