Mirror of https://github.com/FuQuan233/nonebot-plugin-llmchat.git, synced 2025-09-04 10:20:45 +00:00
Compare commits
8 commits
- d640f16abe
- 1600cba172
- 9f81a38d5b
- 53d57beba3
- ea635fd147
- 5014d3014b
- 89baec6abc
- 19ff0026c0
5 changed files with 25 additions and 6 deletions
@@ -18,6 +18,7 @@ _✨ An AI group chat plugin supporting multiple API presets, the MCP protocol, web search, and vision models_
 <img src="https://img.shields.io/pypi/v/nonebot-plugin-llmchat.svg" alt="pypi">
 </a>
 <img src="https://img.shields.io/badge/python-3.10+-blue.svg" alt="python">
+<a href="https://deepwiki.com/FuQuan233/nonebot-plugin-llmchat"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a>
 </div>
@@ -108,6 +109,7 @@ _✨ An AI group chat plugin supporting multiple API presets, the MCP protocol, web search, and vision models_
 | LLMCHAT__RANDOM_TRIGGER_PROB | No | 0.05 | Default random-trigger probability, in [0, 1] |
 | LLMCHAT__DEFAULT_PROMPT | No | Your replies should be as concise and humorous as possible; you may use interjections and kaomoji. You should refuse to answer any politics-related questions. | Default prompt |
 | LLMCHAT__BLACKLIST_USER_IDS | No | [] | Blacklisted user IDs; the bot will not process messages from blacklisted users |
+| LLMCHAT__IGNORE_PREFIXES | No | [] | Message prefixes to ignore; messages matching these prefixes are not processed |
 | LLMCHAT__MCP_SERVERS | No | {} | MCP server configuration; see the table below for details |

 LLMCHAT__API_PRESETS is a list; each entry has the following options
@@ -169,6 +169,12 @@ async def is_triggered(event: GroupMessageEvent) -> bool:
     if event.user_id in plugin_config.blacklist_user_ids:
         return False
 
+    # Ignore messages with specific prefixes
+    msg_text = event.get_plaintext().strip()
+    for prefix in plugin_config.ignore_prefixes:
+        if msg_text.startswith(prefix):
+            return False
+
     state.past_events.append(event)
 
     # Original @-mention trigger condition
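The added check runs before the event is recorded, so blacklisted senders and messages starting with an ignored prefix never reach state.past_events. A standalone sketch of the same filtering rule (the helper name and the sample values below are illustrative, not taken from the plugin):

def should_skip(user_id: int, text: str, blacklist: set[int], ignore_prefixes: list[str]) -> bool:
    """Return True if the trigger logic should drop this message."""
    if user_id in blacklist:
        return True
    stripped = text.strip()
    return any(stripped.startswith(prefix) for prefix in ignore_prefixes)

# Illustrative usage
assert should_skip(42, "hello", blacklist={42}, ignore_prefixes=[])
assert should_skip(1, "/ban someone", blacklist=set(), ignore_prefixes=["/", "#"])
assert not should_skip(1, "hello", blacklist={42}, ignore_prefixes=["/", "#"])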
@@ -313,8 +319,10 @@ async def process_messages(group_id: int):
         content: list[ChatCompletionContentPartParam] = []
 
         # Push the messages the bot missed to the LLM
-        for ev in state.past_events:
-            content.append({"type": "text", "text": format_message(ev)})
+        past_events_snapshot = list(state.past_events)
+        for ev in past_events_snapshot:
+            text_content = format_message(ev)
+            content.append({"type": "text", "text": text_content})
 
         # Convert images in the messages to base64
         if preset.support_image:
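Taking a snapshot with list(state.past_events) before iterating protects the loop from the buffer being mutated by other handlers while messages are being formatted. A toy demonstration of the hazard, assuming the shared buffer behaves like a collections.deque (the plugin's actual container type is not visible in this diff):

from collections import deque

events = deque(["msg1", "msg2", "msg3"])

# Mutating a deque while iterating over it raises RuntimeError.
try:
    for ev in events:
        events.append(ev.upper())
except RuntimeError as exc:
    print(f"direct iteration failed: {exc}")

# Iterating over a snapshot copy is safe; appends made meanwhile do not affect the loop.
events = deque(["msg1", "msg2", "msg3"])
for ev in list(events):
    events.append(ev.upper())
print(events)  # deque(['msg1', 'msg2', 'msg3', 'MSG1', 'MSG2', 'MSG3'])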
@@ -378,7 +386,7 @@ async def process_messages(group_id: int):
             new_messages.append({
                 "role": "tool",
                 "tool_call_id": tool_call.id,
-                "content": str(result.content)
+                "content": str(result)
             })
 
             # Hand the tool call results back to the LLM
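Since MCPClient.call_tool now returns response.content directly (see the hunk further below), the handler stringifies the result as-is. The appended dict follows the OpenAI chat-completions shape for tool results; a minimal sketch with placeholder values:

# Placeholder values; in the plugin these come from the assistant's tool_call
# and the MCP tool's output.
tool_result_message = {
    "role": "tool",                 # marks this message as a tool's output
    "tool_call_id": "call_abc123",  # must match the id emitted by the assistant
    "content": "stringified tool output",
}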
@@ -44,6 +44,10 @@ class ScopedConfig(BaseModel):
     )
     mcp_servers: dict[str, MCPServerConfig] = Field({}, description="MCP server configuration")
     blacklist_user_ids: set[int] = Field(set(), description="Blacklisted user ID list")
+    ignore_prefixes: list[str] = Field(
+        default_factory=list,
+        description="Message prefixes to ignore; messages matching these prefixes are not processed"
+    )
 
 
 class Config(BaseModel):
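A quick way to confirm the new field behaves as documented (an empty list by default, overridable with a plain list of strings) is a stripped-down stand-in model; this sketch is not the plugin's real ScopedConfig:

from pydantic import BaseModel, Field

class DemoConfig(BaseModel):
    # Stand-in for the ignore_prefixes field added above.
    ignore_prefixes: list[str] = Field(
        default_factory=list,
        description="Message prefixes to ignore",
    )

print(DemoConfig().ignore_prefixes)                             # []
print(DemoConfig(ignore_prefixes=["#", "//"]).ignore_prefixes)  # ['#', '//']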
@@ -1,3 +1,4 @@
+import asyncio
 from contextlib import AsyncExitStack
 
 from mcp import ClientSession, StdioServerParameters
@@ -64,9 +65,13 @@ class MCPClient:
         server_name, real_tool_name = tool_name.split("___")
         logger.info(f"Calling tool [{real_tool_name}] on server [{server_name}]")
         session = self.sessions[server_name]
-        response = await session.call_tool(real_tool_name, tool_args)
+        try:
+            response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30)
+        except asyncio.TimeoutError:
+            logger.error(f"Tool call [{real_tool_name}] timed out")
+            return f"Tool call [{real_tool_name}] timed out"
         logger.debug(f"Tool [{real_tool_name}] finished, response: {response}")
-        return response
+        return response.content
 
     def get_friendly_name(self, tool_name: str):
         server_name, real_tool_name = tool_name.split("___")
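The bounded-wait pattern in isolation: asyncio.wait_for cancels the awaited call and raises asyncio.TimeoutError once the deadline passes. A self-contained sketch with a deliberately slow stand-in for session.call_tool (the 30-second budget mirrors the diff; everything else is illustrative):

import asyncio

async def slow_tool_call() -> str:
    # Stand-in for an MCP tool that takes too long to answer.
    await asyncio.sleep(60)
    return "done"

async def call_with_timeout() -> str:
    try:
        return await asyncio.wait_for(slow_tool_call(), timeout=30)
    except asyncio.TimeoutError:
        return "tool call timed out"

print(asyncio.run(call_with_timeout()))  # prints the timeout message after ~30 s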
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "nonebot-plugin-llmchat"
-version = "0.2.3"
+version = "0.2.5"
 description = "Nonebot AI group chat plugin supporting multiple API preset configurations"
 license = "GPL"
 authors = ["FuQuan i@fuquan.moe"]