diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py
index 5903779..d953d5c 100644
--- a/nonebot_plugin_llmchat/__init__.py
+++ b/nonebot_plugin_llmchat/__init__.py
@@ -204,7 +204,7 @@ async def process_messages(group_id: int):
     preset = get_preset(group_id)
 
     # 初始化OpenAI客户端
-    if preset.proxy:
+    if preset.proxy != "":
         client = AsyncOpenAI(
             base_url=preset.api_base,
             api_key=preset.api_key,
diff --git a/nonebot_plugin_llmchat/config.py b/nonebot_plugin_llmchat/config.py
index 6b02ac2..3de419e 100644
--- a/nonebot_plugin_llmchat/config.py
+++ b/nonebot_plugin_llmchat/config.py
@@ -10,7 +10,7 @@ class PresetConfig(BaseModel):
     model_name: str = Field(..., description="模型名称")
     max_tokens: int = Field(2048, description="最大响应token数")
     temperature: float = Field(0.7, description="生成温度(0-2]")
-    proxy = Field(None, description="HTTP代理服务器")
+    proxy: str = Field("", description="HTTP代理服务器")
 
 
 class ScopedConfig(BaseModel):
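
For context, below is a minimal sketch (not part of the patch) of the corrected PresetConfig and the matching empty-string check. The api_base/api_key declarations and placeholder values are assumed here only to make the example self-contained, since config.py is shown only partially; descriptions are given in English. Under Pydantic v2, the old un-annotated `proxy = Field(None, ...)` is rejected at class-definition time with a PydanticUserError about a non-annotated attribute, which is what the added `str` annotation addresses.

# Sketch only: field declarations other than proxy are assumed for a
# self-contained example; the real config.py is only partially shown above.
from pydantic import BaseModel, Field


class PresetConfig(BaseModel):
    api_base: str = Field(..., description="API base URL")     # assumed declaration
    api_key: str = Field(..., description="API key")           # assumed declaration
    model_name: str = Field(..., description="model name")
    max_tokens: int = Field(2048, description="max response tokens")
    temperature: float = Field(0.7, description="sampling temperature (0-2]")
    # Annotated str with an empty-string default, as in the patch. Leaving the
    # annotation off (proxy = Field(None, ...)) makes Pydantic v2 raise
    # PydanticUserError ("A non-annotated attribute was detected ...") on import.
    proxy: str = Field("", description="HTTP proxy server")


preset = PresetConfig(
    api_base="https://api.example.com/v1",  # placeholder values
    api_key="sk-placeholder",
    model_name="some-model",
)

# Mirrors the patched check in process_messages(): "" is the "no proxy" sentinel.
if preset.proxy != "":
    print(f"routing requests through proxy {preset.proxy}")
else:
    print("no proxy configured, using a direct connection")

With "" as the default instead of None, the field stays a plain str, so the explicit `preset.proxy != ""` comparison in __init__.py cleanly distinguishes "no proxy configured" without any Optional handling.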