# nonebot-plugin-llmchat/nonebot_plugin_llmchat/config.py
# Last modified: 2026-03-10 23:34:50 +08:00
# NOTE: descriptions and docstrings intentionally contain Chinese (CJK) text;
# any "ambiguous Unicode" warning from forge UIs can be safely ignored.
from dataclasses import dataclass
from typing import ClassVar

from pydantic import BaseModel, Field
class PresetConfig(BaseModel):
    """API预设配置"""

    # One selectable LLM API endpoint ("preset") the plugin can talk to.
    name: str = Field(..., description="预设名称(唯一标识)")
    api_base: str = Field(..., description="API基础地址")
    api_key: str = Field(..., description="API密钥")
    model_name: str = Field(..., description="模型名称")
    max_tokens: int = Field(2048, description="最大响应token数")
    # Description bracket was garbled ("0-2]"); fixed to "[0-2]".
    # NOTE(review): the range is documentation only — no ge/le validator is
    # enforced, so out-of-range values are accepted; confirm before tightening.
    temperature: float = Field(0.7, description="生成温度[0-2]")
    proxy: str = Field("", description="HTTP代理服务器")
    support_mcp: bool = Field(False, description="是否支持MCP")
    support_image: bool = Field(False, description="是否支持图片输入")
    # Capability flags for generating media via sub-models.
    support_to_image: bool = Field(False, description="是否支持生成图片")
    support_to_voice: bool = Field(False, description="是否支持生成语音")
    support_to_video: bool = Field(False, description="是否支持生成视频")
    # Names of sub-models this preset may call; presumably None means
    # "no callable sub-models" — confirm against the caller.
    call_model_list: list[str] | None = Field(None, description="可调用的子模型名称列表")
class MCPServerConfig(BaseModel):
    """MCP服务器配置"""

    # Connection settings for one MCP server (stdio / sse / streamablehttp mode).
    command: str | None = Field(None, description="stdio模式下MCP命令")
    # default_factory instead of a literal [] / {} default: pydantic copies
    # plain defaults too, but the factory form is the documented idiom and
    # matches ignore_prefixes in ScopedConfig.
    args: list[str] | None = Field(default_factory=list, description="stdio模式下MCP命令参数")
    env: dict[str, str] | None = Field(default_factory=dict, description="stdio模式下MCP命令环境变量")
    url: str | None = Field(None, description="sse模式下MCP服务器地址")
    headers: dict[str, str] | None = Field(default_factory=dict, description="sse模式下http请求头用于认证或其他设置")
    transport_type: str | None = Field(None, description="请求类型 sse、stdio 或 streamablehttp")
    # Extra presentation fields.
    friendly_name: str | None = Field(None, description="MCP服务器友好名称")
    # NOTE(review): "addtional" is a typo for "additional"; the field name is
    # kept as-is so existing config files keep loading.
    addtional_prompt: str | None = Field(None, description="额外提示词")
class ScopedConfig(BaseModel):
    """LLM Chat Plugin配置"""

    # All plugin-level settings, nested under the `llmchat` key of Config.
    api_presets: list[PresetConfig] = Field(
        ..., description="API预设列表至少配置1个预设"
    )
    history_size: int = Field(20, description="LLM上下文消息保留数量")
    past_events_size: int = Field(10, description="触发回复时发送的群消息数量")
    request_timeout: int = Field(30, description="API请求超时时间")
    default_preset: str = Field("off", description="默认使用的预设名称")
    # Description bracket was garbled ("0-1]"); fixed to "[0-1]".
    random_trigger_prob: float = Field(
        0.05, ge=0.0, le=1.0, description="随机触发概率[0-1]"
    )
    default_prompt: str = Field(
        "你的回答应该尽量简洁、幽默、可以使用一些语气词、颜文字。你应该拒绝回答任何政治相关的问题。",
        description="默认提示词",
    )
    # default_factory instead of literal {} / set() defaults, consistent with
    # the ignore_prefixes field below (behavior unchanged: pydantic copies
    # plain defaults, but the factory form is the documented idiom).
    mcp_servers: dict[str, MCPServerConfig] = Field(default_factory=dict, description="MCP服务器配置")
    blacklist_user_ids: set[int] = Field(default_factory=set, description="黑名单用户ID列表")
    ignore_prefixes: list[str] = Field(
        default_factory=list,
        description="需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理"
    )
    enable_private_chat: bool = Field(False, description="是否启用私聊功能")
    private_chat_preset: str = Field("off", description="私聊默认使用的预设名称")
    scheduler_max_retry: int = Field(5, description="定时任务AI调用最大重试次数")
    scheduler_default_reminder: str = Field(
        "您设置的提醒时间到了:{description}",
        description="AI调用失败时的默认提醒模板"
    )
class Config(BaseModel):
    # Plugin root config model: every setting lives under the `llmchat` key.
    llmchat: ScopedConfig
@dataclass
class transportType:
    """MCP transport type string constants (values for transport_type)."""

    # NOTE(review): the lowerCamelCase class name is kept for backward
    # compatibility; PEP 8 would name this TransportType.
    # ClassVar annotations keep these as class-level constants: @dataclass
    # previously saw no annotated fields at all, and ClassVar fields are
    # likewise excluded from __init__, so behavior is unchanged.
    sse: ClassVar[str] = "sse"
    stdio: ClassVar[str] = "stdio"
    streamablehttp: ClassVar[str] = "streamablehttp"