From b4f7b2797ca7596d5095fab29dc0a7d0f66fbc93 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Wed, 29 Oct 2025 11:55:16 +0800 Subject: [PATCH 01/24] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20=E7=BC=93=E5=AD=98MC?= =?UTF-8?q?P=E5=B7=A5=E5=85=B7=E5=88=97=E8=A1=A8=EF=BC=8C=E5=A4=A7?= =?UTF-8?q?=E5=B9=85=E6=8F=90=E5=8D=87=E5=93=8D=E5=BA=94=E9=80=9F=E5=BA=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 8 +- nonebot_plugin_llmchat/mcpclient.py | 158 +++++++++++++++++++++++----- 2 files changed, 134 insertions(+), 32 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 79290a1..ab07224 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -279,7 +279,7 @@ async def process_messages(group_id: int): event = await state.queue.get() logger.debug(f"从队列获取消息 群号:{group_id} 消息ID:{event.message_id}") past_events_snapshot = [] - mcp_client = MCPClient(plugin_config.mcp_servers) + mcp_client = MCPClient.get_instance(plugin_config.mcp_servers) try: systemPrompt = f""" 我想要你帮我在群聊中闲聊,大家一般叫你{"、".join(list(driver.config.nickname))},我将会在后面的信息中告诉你每条群聊信息的发送者和发送时间,你可以直接称呼发送者为他对应的昵称。 @@ -349,7 +349,6 @@ async def process_messages(group_id: int): } if preset.support_mcp: - await mcp_client.connect_to_servers() available_tools = await mcp_client.get_available_tools() client_config["tools"] = available_tools @@ -455,7 +454,8 @@ async def process_messages(group_id: int): finally: state.processing = False state.queue.task_done() - await mcp_client.cleanup() + # 不再需要每次都清理MCPClient,因为它现在是单例 + # await mcp_client.cleanup() # 预设切换命令 @@ -621,3 +621,5 @@ async def init_plugin(): async def cleanup_plugin(): logger.info("插件关闭清理") await save_state() + # 销毁MCPClient单例 + await MCPClient.destroy_instance() diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index c3f3224..d0bc80e 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -10,12 +10,46 @@ from .config import MCPServerConfig class MCPClient: - def __init__(self, server_config: dict[str, MCPServerConfig]): - logger.info(f"正在初始化MCPClient,共有{len(server_config)}个服务器配置") + _instance = None + _initialized = False + + def __new__(cls, server_config: dict[str, MCPServerConfig] | None = None): + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self, server_config: dict[str, MCPServerConfig] | None = None): + if self._initialized: + return + + if server_config is None: + raise ValueError("server_config must be provided for first initialization") + + logger.info(f"正在初始化MCPClient单例,共有{len(server_config)}个服务器配置") self.server_config = server_config self.sessions = {} self.exit_stack = AsyncExitStack() - logger.debug("MCPClient初始化成功") + # 添加工具列表缓存 + self._tools_cache: list | None = None + self._cache_initialized = False + self._initialized = True + logger.debug("MCPClient单例初始化成功") + + @classmethod + def get_instance(cls, server_config: dict[str, MCPServerConfig] | None = None): + """获取MCPClient实例""" + if cls._instance is None: + if server_config is None: + raise ValueError("server_config must be provided for first initialization") + cls._instance = cls(server_config) + return cls._instance + + @classmethod + def instance(cls): + """快速获取已初始化的MCPClient实例,如果未初始化则抛出异常""" + if cls._instance is None: + raise RuntimeError("MCPClient has not been initialized. 
Call get_instance() first.") + return cls._instance async def connect_to_servers(self): logger.info(f"开始连接{len(self.server_config)}个MCP服务器") @@ -38,47 +72,113 @@ class MCPClient: logger.info(f"已成功连接到MCP服务器[{server_name}]") + def _create_session_context(self, server_name: str): + """创建临时会话的异步上下文管理器""" + config = self.server_config[server_name] + + class SessionContext: + def __init__(self): + self.session = None + self.exit_stack = AsyncExitStack() + + async def __aenter__(self): + if config.url: + transport = await self.exit_stack.enter_async_context( + sse_client(url=config.url, headers=config.headers) + ) + elif config.command: + transport = await self.exit_stack.enter_async_context( + stdio_client(StdioServerParameters(**config.model_dump())) + ) + else: + raise ValueError("Server config must have either url or command") + + read, write = transport + self.session = await self.exit_stack.enter_async_context(ClientSession(read, write)) + await self.session.initialize() + return self.session + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.exit_stack.aclose() + + return SessionContext() + async def get_available_tools(self): - logger.info(f"正在从{len(self.sessions)}个已连接的服务器获取可用工具") + """获取可用工具列表,使用缓存机制""" + if self._tools_cache is not None: + logger.debug("返回缓存的工具列表") + return self._tools_cache + + logger.info(f"初始化工具列表缓存,需要连接{len(self.server_config)}个服务器") available_tools = [] - for server_name, session in self.sessions.items(): - logger.debug(f"正在列出服务器[{server_name}]中的工具") - response = await session.list_tools() - tools = response.tools - logger.debug(f"在服务器[{server_name}]中找到{len(tools)}个工具") + for server_name in self.server_config.keys(): + logger.debug(f"正在从服务器[{server_name}]获取工具列表") + async with self._create_session_context(server_name) as session: + response = await session.list_tools() + tools = response.tools + logger.debug(f"在服务器[{server_name}]中找到{len(tools)}个工具") - available_tools.extend( - { - "type": "function", - "function": { - "name": f"{server_name}___{tool.name}", - "description": tool.description, - "parameters": tool.inputSchema, - }, - } - for tool in tools - ) + available_tools.extend( + { + "type": "function", + "function": { + "name": f"{server_name}___{tool.name}", + "description": tool.description, + "parameters": tool.inputSchema, + }, + } + for tool in tools + ) + + # 缓存工具列表 + self._tools_cache = available_tools + self._cache_initialized = True + logger.info(f"工具列表缓存完成,共缓存{len(available_tools)}个工具") return available_tools async def call_tool(self, tool_name: str, tool_args: dict): + """按需连接调用工具,调用后立即断开""" server_name, real_tool_name = tool_name.split("___") - logger.info(f"正在服务器[{server_name}]上调用工具[{real_tool_name}]") - session = self.sessions[server_name] - try: - response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30) - except asyncio.TimeoutError: - logger.error(f"调用工具[{real_tool_name}]超时") - return f"调用工具[{real_tool_name}]超时" - logger.debug(f"工具[{real_tool_name}]调用完成,响应: {response}") - return response.content + logger.info(f"按需连接到服务器[{server_name}]调用工具[{real_tool_name}]") + + async with self._create_session_context(server_name) as session: + try: + response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30) + logger.debug(f"工具[{real_tool_name}]调用完成,响应: {response}") + return response.content + except asyncio.TimeoutError: + logger.error(f"调用工具[{real_tool_name}]超时") + return f"调用工具[{real_tool_name}]超时" def get_friendly_name(self, tool_name: str): logger.debug(tool_name) 
server_name, real_tool_name = tool_name.split("___") return (self.server_config[server_name].friendly_name or server_name) + " - " + real_tool_name + def clear_tools_cache(self): + """清除工具列表缓存""" + logger.info("清除工具列表缓存") + self._tools_cache = None + self._cache_initialized = False + async def cleanup(self): + """清理资源(不销毁单例)""" logger.debug("正在清理MCPClient资源") + # 只清除缓存,不销毁单例 + # self.clear_tools_cache() # 保留缓存,避免重复获取工具列表 await self.exit_stack.aclose() + # 重新初始化exit_stack以便后续使用 + self.exit_stack = AsyncExitStack() logger.debug("MCPClient资源清理完成") + + @classmethod + async def destroy_instance(cls): + """完全销毁单例实例(仅在应用关闭时使用)""" + if cls._instance is not None: + logger.info("销毁MCPClient单例") + await cls._instance.cleanup() + cls._instance.clear_tools_cache() + cls._instance = None + cls._initialized = False + logger.debug("MCPClient单例已销毁") From 63e446d5e4c4a1ac9ec268c07b504a93bfdcd6c0 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 31 Oct 2025 17:07:10 +0800 Subject: [PATCH 02/24] =?UTF-8?q?=E2=9C=A8=20=E6=96=B0=E5=A2=9E=E5=86=85?= =?UTF-8?q?=E7=BD=AEOneBot=E5=B7=A5=E5=85=B7=E6=94=AF=E6=8C=81=EF=BC=8C?= =?UTF-8?q?=E5=8C=85=E6=8B=AC=E7=A6=81=E8=A8=80=E3=80=81=E8=8E=B7=E5=8F=96?= =?UTF-8?q?=E7=BE=A4=E4=BF=A1=E6=81=AF=E7=AD=89=E5=8A=9F=E8=83=BD=EF=BC=8C?= =?UTF-8?q?=E5=B9=B6=E4=BC=98=E5=8C=96=E5=B7=A5=E5=85=B7=E8=B0=83=E7=94=A8?= =?UTF-8?q?=E9=80=BB=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 23 ++- nonebot_plugin_llmchat/__init__.py | 9 +- nonebot_plugin_llmchat/mcpclient.py | 24 ++- nonebot_plugin_llmchat/onebottools.py | 215 ++++++++++++++++++++++++++ 4 files changed, 267 insertions(+), 4 deletions(-) create mode 100644 nonebot_plugin_llmchat/onebottools.py diff --git a/README.md b/README.md index f945db3..21ebf63 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ # nonebot-plugin-llmchat -_✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插件 ✨_ +_✨ 支持多API预设、MCP协议、内置工具、联网搜索、视觉模型的AI群聊插件 ✨_ @@ -33,6 +33,11 @@ _✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插 - 通过连接一些搜索MCP服务器可以实现在线搜索 - 兼容 Claude.app 的配置格式 +1. **内置工具** + - 内置OneBot群操作工具,LLM可直接进行群管理操作(需模型支持tool_call) + - 支持禁言用户、获取群信息、查看群成员等功能 + - 支持戳一戳、撤回消息等互动功能 + 1. 
**多API预设支持** - 可配置多个LLM服务预设(如不同模型/API密钥) - 支持运行时通过`API预设`命令热切换API配置 @@ -116,6 +121,22 @@ _✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插 | LLMCHAT__IGNORE_PREFIXES | 否 | [] | 需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理 | | LLMCHAT__MCP_SERVERS | 否 | {} | MCP服务器配置,具体见下表 | +### 内置OneBot工具 + +插件内置了以下工具,LLM可以直接调用这些工具进行群操作(需模型支持tool_call),这些工具不需要额外配置: + +| 工具名称 | 说明 | 权限要求 | +|:-----:|:----:|:----:| +| ob__mute_user | 禁言指定用户 | 机器人需要管理员权限 | +| ob__get_group_info | 获取群信息 | 无 | +| ob__get_group_member_info | 获取指定群成员信息 | 无 | +| ob__get_group_member_list | 获取群成员列表 | 无 | +| ob__poke_user | 戳一戳指定用户 | 无 | +| ob__recall_message | 撤回指定消息 | 机器人需要管理员权限或为消息发送者 | + + +### MCP服务器配置 + 其中LLMCHAT__API_PRESETS为一个列表,每项配置有以下的配置项 | 配置项 | 必填 | 默认值 | 说明 | |:-----:|:----:|:----:|:----:| diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index ab07224..acfa403 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -381,8 +381,13 @@ async def process_messages(group_id: int): # 发送工具调用提示 await handler.send(Message(f"正在使用{mcp_client.get_friendly_name(tool_name)}")) - # 执行工具调用 - result = await mcp_client.call_tool(tool_name, tool_args) + # 执行工具调用,传递群组和机器人信息用于QQ工具 + result = await mcp_client.call_tool( + tool_name, + tool_args, + group_id=event.group_id, + bot_id=str(event.self_id) + ) new_messages.append({ "role": "tool", diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index d0bc80e..09f5b73 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -7,6 +7,7 @@ from mcp.client.stdio import stdio_client from nonebot import logger from .config import MCPServerConfig +from .onebottools import OneBotTools class MCPClient: @@ -32,6 +33,8 @@ class MCPClient: # 添加工具列表缓存 self._tools_cache: list | None = None self._cache_initialized = False + # 初始化OneBot工具 + self.onebot_tools = OneBotTools() self._initialized = True logger.debug("MCPClient单例初始化成功") @@ -112,6 +115,12 @@ class MCPClient: logger.info(f"初始化工具列表缓存,需要连接{len(self.server_config)}个服务器") available_tools = [] + # 添加OneBot内置工具 + onebot_tools = self.onebot_tools.get_available_tools() + available_tools.extend(onebot_tools) + logger.debug(f"添加了{len(onebot_tools)}个OneBot内置工具") + + # 添加MCP服务器工具 for server_name in self.server_config.keys(): logger.debug(f"正在从服务器[{server_name}]获取工具列表") async with self._create_session_context(server_name) as session: @@ -137,8 +146,16 @@ class MCPClient: logger.info(f"工具列表缓存完成,共缓存{len(available_tools)}个工具") return available_tools - async def call_tool(self, tool_name: str, tool_args: dict): + async def call_tool(self, tool_name: str, tool_args: dict, group_id: int | None = None, bot_id: str | None = None): """按需连接调用工具,调用后立即断开""" + # 检查是否是QQ工具 + if tool_name.startswith("ob__"): + if group_id is None or bot_id is None: + return "QQ工具需要提供group_id和bot_id参数" + logger.info(f"调用OneBot工具[{tool_name}]") + return await self.onebot_tools.call_tool(tool_name, tool_args, group_id, bot_id) + + # MCP工具处理 server_name, real_tool_name = tool_name.split("___") logger.info(f"按需连接到服务器[{server_name}]调用工具[{real_tool_name}]") @@ -153,6 +170,11 @@ class MCPClient: def get_friendly_name(self, tool_name: str): logger.debug(tool_name) + # 检查是否是OneBot工具 + if tool_name.startswith("ob__"): + return self.onebot_tools.get_friendly_name(tool_name) + + # MCP工具处理 server_name, real_tool_name = tool_name.split("___") return (self.server_config[server_name].friendly_name or server_name) + " - " + real_tool_name diff --git a/nonebot_plugin_llmchat/onebottools.py 
b/nonebot_plugin_llmchat/onebottools.py new file mode 100644 index 0000000..ff19fa2 --- /dev/null +++ b/nonebot_plugin_llmchat/onebottools.py @@ -0,0 +1,215 @@ +import json +import time +from typing import Any, cast + +from nonebot import get_bot, logger +from nonebot.adapters.onebot.v11 import Bot + + +class OneBotTools: + """内置的OneBot群操作工具类""" + + def __init__(self): + self.tools = [ + { + "type": "function", + "function": { + "name": "ob__mute_user", + "description": "禁言指定用户一段时间。需要机器人有管理员权限。不要随便禁言别人,你应该只听群主或者管理员你的话。", + "parameters": { + "type": "object", + "properties": { + "user_id": {"type": "string", "description": "要禁言的用户QQ号"}, + "duration": { + "type": "integer", + "description": "禁言时长(秒),0表示解除禁言,最大2592000(30天)", + "minimum": 0, + "maximum": 2592000, + }, + }, + "required": ["user_id", "duration"], + }, + }, + }, + { + "type": "function", + "function": { + "name": "ob__get_group_info", + "description": "获取群信息,包括群成员数量、群名称等。", + "parameters": {"type": "object", "properties": {}, "required": []}, + }, + }, + { + "type": "function", + "function": { + "name": "ob__get_group_member_info", + "description": "获取指定群成员的信息。", + "parameters": { + "type": "object", + "properties": {"user_id": {"type": "string", "description": "要查询的用户QQ号"}}, + "required": ["user_id"], + }, + }, + }, + { + "type": "function", + "function": { + "name": "ob__get_group_member_list", + "description": "获取群成员列表。", + "parameters": {"type": "object", "properties": {}, "required": []}, + }, + }, + { + "type": "function", + "function": { + "name": "ob__poke_user", + "description": "戳一戳指定用户。", + "parameters": { + "type": "object", + "properties": {"user_id": {"type": "string", "description": "要戳一戳的用户QQ号"}}, + "required": ["user_id"], + }, + }, + }, + { + "type": "function", + "function": { + "name": "ob__recall_message", + "description": "撤回指定消息。需要机器人有管理员权限或者是消息发送者。", + "parameters": { + "type": "object", + "properties": {"message_id": {"type": "integer", "description": "要撤回的消息ID"}}, + "required": ["message_id"], + }, + }, + }, + ] + + def get_friendly_name(self, tool_name: str) -> str: + """获取工具的友好名称""" + friendly_names = { + "ob__mute_user": "OneBot - 禁言用户", + "ob__get_group_info": "OneBot - 获取群信息", + "ob__get_group_member_info": "OneBot - 获取成员信息", + "ob__get_group_member_list": "OneBot - 获取成员列表", + "ob__poke_user": "OneBot - 戳一戳用户", + "ob__recall_message": "OneBot - 撤回消息", + } + return friendly_names.get(tool_name, tool_name) + + def get_available_tools(self) -> list[dict[str, Any]]: + """获取可用的工具列表""" + return self.tools + + async def call_tool(self, tool_name: str, tool_args: dict[str, Any], group_id: int, bot_id: str) -> str: + """调用指定的工具""" + try: + bot = cast(Bot, get_bot(bot_id)) + + if tool_name == "ob__mute_user": + return await self._mute_user(bot, group_id, tool_args) + elif tool_name == "ob__get_group_info": + return await self._get_group_info(bot, group_id, tool_args) + elif tool_name == "ob__get_group_member_info": + return await self._get_group_member_info(bot, group_id, tool_args) + elif tool_name == "ob__get_group_member_list": + return await self._get_group_member_list(bot, group_id, tool_args) + elif tool_name == "ob__poke_user": + return await self._poke_user(bot, group_id, tool_args) + elif tool_name == "ob__recall_message": + return await self._recall_message(bot, group_id, tool_args) + else: + return f"未知的工具: {tool_name}" + + except Exception as e: + logger.error(f"调用OneBot工具 {tool_name} 时出错: {e}") + return f"执行失败: {e!s}" + + async def _mute_user(self, bot: Bot, group_id: int, args: dict[str, Any]) 
-> str: + """禁言用户""" + user_id = int(args["user_id"]) + duration = args["duration"] + + try: + await bot.set_group_ban(group_id=group_id, user_id=user_id, duration=duration) + if duration > 0: + return f"成功禁言用户 {user_id},时长 {duration} 秒" + else: + return f"成功解除用户 {user_id} 的禁言" + except Exception as e: + return f"禁言操作失败: {e!s}" + + async def _get_group_info(self, bot: Bot, group_id: int, _args: dict[str, Any]) -> str: + """获取群信息""" + try: + group_info = await bot.get_group_info(group_id=group_id) + info = { + "群号": group_info["group_id"], + "群名称": group_info["group_name"], + "群成员数": group_info["member_count"], + "群上限": group_info["max_member_count"], + } + return json.dumps(info, ensure_ascii=False, indent=2) + except Exception as e: + return f"获取群信息失败: {e!s}" + + async def _get_group_member_info(self, bot: Bot, group_id: int, args: dict[str, Any]) -> str: + """获取群成员信息""" + user_id = int(args["user_id"]) + + try: + member_info = await bot.get_group_member_info(group_id=group_id, user_id=user_id) + info = { + "用户QQ": member_info["user_id"], + "昵称": member_info["nickname"], + "群名片": member_info["card"], + "性别": member_info["sex"], + "年龄": member_info["age"], + "地区": member_info["area"], + "加群时间": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(member_info["join_time"])), + "最后发言时间": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(member_info["last_sent_time"])), + "群内等级": member_info["level"], + "角色": member_info["role"], + "专属头衔": member_info["title"], + } + return json.dumps(info, ensure_ascii=False, indent=2) + except Exception as e: + return f"获取成员信息失败: {e!s}" + + async def _get_group_member_list(self, bot: Bot, group_id: int, _args: dict[str, Any]) -> str: + """获取群成员列表""" + try: + member_list = await bot.get_group_member_list(group_id=group_id) + members = [] + for member in member_list: + members.append( + {"QQ": member["user_id"], "昵称": member["nickname"], "群名片": member["card"], "角色": member["role"]} + ) + + result = {"群成员总数": len(members), "成员列表": members} + return json.dumps(result, ensure_ascii=False, indent=2) + except Exception as e: + return f"获取群成员列表失败: {e!s}" + + async def _poke_user(self, bot: Bot, group_id: int, args: dict[str, Any]) -> str: + """戳一戳用户""" + user_id = int(args["user_id"]) + + try: + # 使用OneBot的戳一戳API + await bot.call_api("group_poke", group_id=group_id, user_id=user_id) + return f"成功戳了戳用户 {user_id}" + except Exception as e: + return f"戳一戳失败: {e!s}" + + async def _recall_message(self, bot: Bot, group_id: int, args: dict[str, Any]) -> str: + """撤回消息""" + message_id = int(args["message_id"]) + + try: + await bot.delete_msg(message_id=message_id) + return f"成功撤回消息 {message_id}" + except Exception as e: + return f"撤回消息失败: {e!s}" + + From 5bd92dfda6fde5f8cda847013a845b78672df222 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 31 Oct 2025 17:12:05 +0800 Subject: [PATCH 03/24] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20fix=20lint?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/mcpclient.py | 2 +- nonebot_plugin_llmchat/onebottools.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 09f5b73..929caca 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -173,7 +173,7 @@ class MCPClient: # 检查是否是OneBot工具 if tool_name.startswith("ob__"): return self.onebot_tools.get_friendly_name(tool_name) - + # MCP工具处理 server_name, real_tool_name = tool_name.split("___") return 
(self.server_config[server_name].friendly_name or server_name) + " - " + real_tool_name diff --git a/nonebot_plugin_llmchat/onebottools.py b/nonebot_plugin_llmchat/onebottools.py index ff19fa2..166e0d4 100644 --- a/nonebot_plugin_llmchat/onebottools.py +++ b/nonebot_plugin_llmchat/onebottools.py @@ -15,7 +15,7 @@ class OneBotTools: "type": "function", "function": { "name": "ob__mute_user", - "description": "禁言指定用户一段时间。需要机器人有管理员权限。不要随便禁言别人,你应该只听群主或者管理员你的话。", + "description": "禁言指定用户一段时间。需要机器人有管理员权限。不能随便禁言成员,你应该听从管理员的指令。", "parameters": { "type": "object", "properties": { From 21421c4754d69ccfe856f6597035ff4d1efb00d0 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 31 Oct 2025 17:13:25 +0800 Subject: [PATCH 04/24] =?UTF-8?q?=F0=9F=94=96=20=E6=9B=B4=E6=96=B0?= =?UTF-8?q?=E7=89=88=E6=9C=AC=E5=8F=B7=E8=87=B30.4.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 15b4200..9b3eab7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.3.1" +version = "0.4.0" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From ca1b5e75ece59e000816e16bdbeffad9291c74d9 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sat, 1 Nov 2025 23:23:23 +0800 Subject: [PATCH 05/24] =?UTF-8?q?=E2=9C=A8=20=E4=BF=AE=E6=94=B9=E5=B7=A5?= =?UTF-8?q?=E5=85=B7=E5=90=8D=E7=A7=B0=E6=A0=BC=E5=BC=8F=EF=BC=8C=E4=BC=98?= =?UTF-8?q?=E5=8C=96OneBot=E5=92=8CMCP=E5=B7=A5=E5=85=B7=E7=9A=84=E8=B0=83?= =?UTF-8?q?=E7=94=A8=E9=80=BB=E8=BE=91=EF=BC=8C=E5=A2=9E=E5=BC=BA=E9=94=99?= =?UTF-8?q?=E8=AF=AF=E5=A4=84=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/mcpclient.py | 54 ++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 929caca..4d38f0a 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -132,7 +132,7 @@ class MCPClient: { "type": "function", "function": { - "name": f"{server_name}___{tool.name}", + "name": f"mcp__{server_name}__{tool.name}", "description": tool.description, "parameters": tool.inputSchema, }, @@ -148,35 +148,55 @@ class MCPClient: async def call_tool(self, tool_name: str, tool_args: dict, group_id: int | None = None, bot_id: str | None = None): """按需连接调用工具,调用后立即断开""" - # 检查是否是QQ工具 + # 检查是否是OneBot内置工具 if tool_name.startswith("ob__"): if group_id is None or bot_id is None: return "QQ工具需要提供group_id和bot_id参数" logger.info(f"调用OneBot工具[{tool_name}]") return await self.onebot_tools.call_tool(tool_name, tool_args, group_id, bot_id) - # MCP工具处理 - server_name, real_tool_name = tool_name.split("___") - logger.info(f"按需连接到服务器[{server_name}]调用工具[{real_tool_name}]") + # 检查是否是MCP工具 + if tool_name.startswith("mcp__"): + # MCP工具处理:mcp__server_name__tool_name + parts = tool_name.split("__") + if len(parts) != 3 or parts[0] != "mcp": + return f"MCP工具名称格式错误: {tool_name}" - async with self._create_session_context(server_name) as session: - try: - response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30) - logger.debug(f"工具[{real_tool_name}]调用完成,响应: {response}") - return response.content - except asyncio.TimeoutError: - 
logger.error(f"调用工具[{real_tool_name}]超时") - return f"调用工具[{real_tool_name}]超时" + server_name = parts[1] + real_tool_name = parts[2] + logger.info(f"按需连接到服务器[{server_name}]调用工具[{real_tool_name}]") + + async with self._create_session_context(server_name) as session: + try: + response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30) + logger.debug(f"工具[{real_tool_name}]调用完成,响应: {response}") + return response.content + except asyncio.TimeoutError: + logger.error(f"调用工具[{real_tool_name}]超时") + return f"调用工具[{real_tool_name}]超时" + + # 未知工具类型 + return f"未知的工具类型: {tool_name}" def get_friendly_name(self, tool_name: str): logger.debug(tool_name) - # 检查是否是OneBot工具 + # 检查是否是OneBot内置工具 if tool_name.startswith("ob__"): return self.onebot_tools.get_friendly_name(tool_name) - # MCP工具处理 - server_name, real_tool_name = tool_name.split("___") - return (self.server_config[server_name].friendly_name or server_name) + " - " + real_tool_name + # 检查是否是MCP工具 + if tool_name.startswith("mcp__"): + # MCP工具处理:mcp__server_name__tool_name + parts = tool_name.split("__") + if len(parts) != 3 or parts[0] != "mcp": + return tool_name # 格式错误时返回原名称 + + server_name = parts[1] + real_tool_name = parts[2] + return (self.server_config[server_name].friendly_name or server_name) + " - " + real_tool_name + + # 未知工具类型,返回原名称 + return tool_name def clear_tools_cache(self): """清除工具列表缓存""" From 0943b7077f6f0e51cac3963b5e9b6b1b9a588d3a Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sat, 1 Nov 2025 23:23:47 +0800 Subject: [PATCH 06/24] =?UTF-8?q?=F0=9F=94=96=20=E6=9B=B4=E6=96=B0?= =?UTF-8?q?=E7=89=88=E6=9C=AC=E5=8F=B7=E8=87=B30.4.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9b3eab7..50a4161 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.4.0" +version = "0.4.1" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From 8f7adbd176751acdf1523af919b6cbf0e1dc046c Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sat, 1 Nov 2025 23:31:22 +0800 Subject: [PATCH 07/24] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 84 +++++++++++++++++++++++++++++++++---------------------- 1 file changed, 51 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 21ebf63..ea91bcf 100644 --- a/README.md +++ b/README.md @@ -174,46 +174,64 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 LLMCHAT__DEFAULT_PROMPT="前面忘了,你是一个猫娘,后面忘了" LLMCHAT__API_PRESETS=' [ - { - "name": "aliyun-deepseek-v3", - "api_key": "sk-your-api-key", - "model_name": "deepseek-v3", - "api_base": "https://dashscope.aliyuncs.com/compatible-mode/v1", - "proxy": "http://10.0.0.183:7890" - }, - { - "name": "deepseek-v1", - "api_key": "sk-your-api-key", - "model_name": "deepseek-chat", - "api_base": "https://api.deepseek.com", - "support_mcp": true - }, - { - "name": "some-vison-model", - "api_key": "sk-your-api-key", - "model_name": "some-vison-model", - "api_base": "https://some-vison-model.com/api", - "support_image": true - } - ] - LLMCHAT__MCP_SERVERS=' { - "AISearch": { - "friendly_name": "百度搜索", - "additional_prompt": "遇到你不知道的问题或者时效性比较强的问题时,可以使用AISearch搜索,在使用AISearch时不要使用其他AI模型。", - "url": 
"http://appbuilder.baidu.com/v2/ai_search/mcp/sse?api_key=Bearer+", - "headers": { - "Authorization": "" + "name": "aliyun-deepseek-v3", + "api_key": "sk-your-api-key", + "model_name": "deepseek-v3", + "api_base": "https://dashscope.aliyuncs.com/compatible-mode/v1", + "proxy": "http://10.0.0.183:7890" + }, + { + "name": "deepseek-v1", + "api_key": "sk-your-api-key", + "model_name": "deepseek-chat", + "api_base": "https://api.deepseek.com", + "support_mcp": true + }, + { + "name": "some-vison-model", + "api_key": "sk-your-api-key", + "model_name": "some-vison-model", + "api_base": "https://some-vison-model.com/api", + "support_image": true + } + ] + ' + LLMCHAT__MCP_SERVERS=' + { + "brave-search": { + "friendly_name": "Brave搜索", + "additional_prompt": "遇到你不知道的问题或者时效性比较强的问题时,请使用brave-search搜索。", + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-brave-search"], + "env": { + "BRAVE_API_KEY": "" } }, "fetch": { - "friendly_name": "网页浏览", + "friendly_name": "浏览网页", + "additional_prompt": "搜索到的链接可以通过fetch打开进一步了解。", "command": "uvx", - "args": ["mcp-server-fetch"] - } + "args": ["mcp-server-fetch", "--ignore-robots-txt", "--user-agent=\"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36\""] + }, + "hefeng-weather": { + "friendly_name": "和风天气", + "command": "npx", + "args": ["hefeng-mcp-weather@latest", "--apiKey="] + }, + "mcp-server-code-runner": { + "friendly_name": "代码运行器", + "additional_prompt": "在使用的时候你需要将你需要的结果输出出来,用户看不到你的代码,如果你需要给用户展示,你需要将代码以文字的形式发送出来。", + "command": "docker", + "args": [ + "run", + "--rm", + "-i", + "formulahendry/mcp-server-code-runner" + ] + }, } ' - ' From 2f6236546042d8d98cd56e54fa697a0dd36a9d8a Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sat, 1 Nov 2025 23:32:55 +0800 Subject: [PATCH 08/24] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index ea91bcf..9239110 100644 --- a/README.md +++ b/README.md @@ -205,7 +205,7 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 "command": "npx", "args": ["-y", "@modelcontextprotocol/server-brave-search"], "env": { - "BRAVE_API_KEY": "" + "BRAVE_API_KEY": "" } }, "fetch": { @@ -224,10 +224,10 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 "additional_prompt": "在使用的时候你需要将你需要的结果输出出来,用户看不到你的代码,如果你需要给用户展示,你需要将代码以文字的形式发送出来。", "command": "docker", "args": [ - "run", - "--rm", - "-i", - "formulahendry/mcp-server-code-runner" + "run", + "--rm", + "-i", + "formulahendry/mcp-server-code-runner" ] }, } From 36a47fa5e20ca5a8e7e08f69a7f4f797a8a7ebf9 Mon Sep 17 00:00:00 2001 From: XokoukioX <3282076201@qq.com> Date: Thu, 6 Nov 2025 00:31:17 +0800 Subject: [PATCH 09/24] =?UTF-8?q?WIP::=E7=A7=81=E8=81=8A=E5=8A=9F=E8=83=BD?= =?UTF-8?q?=E5=AE=9E=E7=8E=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- PRIVATE_CHAT_CHANGES.md | 183 ++++++++++++++++ README.md | 46 +++- nonebot_plugin_llmchat/__init__.py | 336 ++++++++++++++++++++++++----- nonebot_plugin_llmchat/config.py | 2 + 4 files changed, 510 insertions(+), 57 deletions(-) create mode 100644 PRIVATE_CHAT_CHANGES.md diff --git a/PRIVATE_CHAT_CHANGES.md b/PRIVATE_CHAT_CHANGES.md new file mode 100644 index 0000000..701ed86 --- /dev/null +++ b/PRIVATE_CHAT_CHANGES.md @@ -0,0 +1,183 @@ +# 私聊功能实现总结 + +## 📝 概览 + +已成功为 nonebot-plugin-llmchat 
项目添加了完整的私聊功能支持。用户现在可以在私聊中与机器人进行对话,同时保持群聊功能完全不变。 + +--- + +## 🔧 主要改动 + +### 1. **config.py** - 配置模块 + +#### 新增配置项: +- `LLMCHAT__ENABLE_PRIVATE_CHAT` (bool, 默认值: False) + - 是否启用私聊功能 + +- `LLMCHAT__PRIVATE_CHAT_PRESET` (str, 默认值: "off") + - 私聊默认使用的预设名称 + +### 2. **__init__.py** - 主程序模块 + +#### 新增导入: +```python +from typing import Union +from nonebot.adapters.onebot.v11 import PrivateMessageEvent +``` + +#### 新增数据结构: + +**PrivateChatState 类** +- 用于管理每个用户的私聊状态 +- 结构与 GroupState 类似,但针对单个用户独立管理 +- 包含:preset_name、history、queue、processing 等属性 + +**private_chat_states 字典** +- 类型:`dict[int, PrivateChatState]` +- 按用户ID存储私聊状态 + +#### 修改的函数: + +1. **format_message()** + - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` + - 支持两种消息事件类型的格式化 + +2. **is_triggered()** + - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` + - 新增私聊事件检测逻辑 + - 私聊消息在启用且预设不为"off"时自动触发 + +3. **get_preset()** + - 新增参数:`is_group: bool = True` + - 支持从群组或私聊状态获取预设配置 + +4. **process_messages()** + - 新增参数:`context_id: int, is_group: bool = True` + - 支持处理群组和私聊消息 + - 私聊时跳过OneBot群操作工具(ob__开头的工具) + +5. **handle_message()** + - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` + - 支持路由到不同的处理逻辑 + +6. **save_state()** / **load_state()** + - 新增私聊状态的持久化 + - 私聊状态保存到单独的文件:`llmchat_private_state.json` + +#### 新增命令处理器(私聊相关): + +所有私聊命令需要主人权限,且仅在启用私聊功能时可用: + +1. **私聊API预设** + - 查看或修改私聊使用的API预设 + - 用法:`私聊API预设 [预设名]` + +2. **私聊修改设定** + - 修改私聊机器人的性格设定 + - 用法:`私聊修改设定 [新设定]` + +3. **私聊记忆清除** + - 清除私聊的对话历史记录 + - 用法:`私聊记忆清除` + +4. **私聊切换思维输出** + - 切换是否输出AI的思维过程 + - 用法:`私聊切换思维输出` + +### 3. **README.md** - 文档更新 + +#### 更新的章节: + +1. **项目介绍** + - 更新标题为"群聊&私聊的AI对话插件" + - 添加"群聊和私聊支持"功能说明 + +2. **配置表格** + - 添加两个新配置项的说明 + +3. **使用指南** + - 将原"指令表"改名为"群聊指令表" + - 新增"私聊指令表" + - 添加"私聊功能启用示例"部分 + +--- + +## 🚀 使用指南 + +### 启用私聊功能 + +在 `.env` 文件中添加: + +```bash +# 启用私聊功能 +LLMCHAT__ENABLE_PRIVATE_CHAT=true + +# 设置私聊默认预设 +LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" +``` + +### 私聊命令示例 + +``` +# 主人私聊机器人 + +私聊API预设 # 查看当前预设 +私聊API预设 aliyun-deepseek-v3 # 切换预设 + +私聊修改设定 你是一个有趣的AI # 修改性格设定 + +私聊记忆清除 # 清除对话记忆 + +私聊切换思维输出 # 开关思维过程输出 +``` + +--- + +## 🔑 关键特性 + +✅ **独立管理** - 群聊和私聊拥有完全独立的对话记忆和配置 + +✅ **灵活控制** - 可单独启用/禁用私聊功能,无需影响群聊 + +✅ **自动触发** - 私聊消息自动触发回复,无需@机器人 + +✅ **权限隔离** - 私聊命令仅主人可用 + +✅ **工具适配** - 私聊时自动跳过不适用的群操作工具 + +✅ **状态持久化** - 私聊状态独立保存和恢复 + +--- + +## 📊 文件对比 + +| 文件 | 变更类型 | 主要改动 | +|------|--------|--------| +| config.py | 修改 | 新增2个配置项 | +| __init__.py | 修改 | 新增私聊类、处理器、命令 | +| README.md | 修改 | 更新文档说明 | + +--- + +## ⚠️ 注意事项 + +1. **默认禁用** - 私聊功能默认为禁用状态,需要在配置文件中显式启用 + +2. **群操作工具** - OneBot群操作工具(禁言、撤回等)在私聊中不可用 + +3. **状态文件** - 私聊状态存储在 `llmchat_private_state.json` 文件中 + +4. **权限限制** - 所有私聊命令都需要主人权限 + +5. **独立预设** - 私聊和群聊可以使用不同的API预设 + +--- + +## ✨ 后续改进建议 + +- [ ] 支持多用户私聊会话管理面板 +- [ ] 添加私聊消息转发到管理员功能 +- [ ] 实现私聊速率限制 +- [ ] 添加私聊用户黑名单 +- [ ] 支持私聊消息加密存储 + diff --git a/README.md b/README.md index 9239110..4e645e1 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ # nonebot-plugin-llmchat -_✨ 支持多API预设、MCP协议、内置工具、联网搜索、视觉模型的AI群聊插件 ✨_ +_✨ 支持多API预设、MCP协议、内置工具、联网搜索、视觉模型、群聊&私聊的AI对话插件 ✨_ @@ -48,6 +48,12 @@ _✨ 支持多API预设、MCP协议、内置工具、联网搜索、视觉模型 - 支持处理回复消息 - 群聊消息顺序处理,防止消息错乱 +1. **群聊和私聊支持** + - 支持群聊场景(默认启用) + - 支持私聊场景(可选启用) + - 分别管理群聊和私聊的对话记忆 + - 灵活的权限配置 + 1. 
**分群聊上下文记忆管理** - 分群聊保留对话历史记录(可配置保留条数) - 自动合并未处理消息,降低API用量 @@ -120,6 +126,8 @@ _✨ 支持多API预设、MCP协议、内置工具、联网搜索、视觉模型 | LLMCHAT__BLACKLIST_USER_IDS | 否 | [] | 黑名单用户ID列表,机器人将不会处理黑名单用户的消息 | | LLMCHAT__IGNORE_PREFIXES | 否 | [] | 需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理 | | LLMCHAT__MCP_SERVERS | 否 | {} | MCP服务器配置,具体见下表 | +| LLMCHAT__ENABLE_PRIVATE_CHAT | 否 | False | 是否启用私聊功能 | +| LLMCHAT__PRIVATE_CHAT_PRESET | 否 | off | 私聊默认使用的预设名称 | ### 内置OneBot工具 @@ -241,7 +249,7 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 配置完成后@机器人即可手动触发回复,另外在机器人收到群聊消息时会根据`LLMCHAT__RANDOM_TRIGGER_PROB`配置的概率或群聊中使用指令设置的概率随机自动触发回复。 -### 指令表 +### 群聊指令表 以下指令均仅对发送的群聊生效,不同群聊配置不互通。 @@ -253,6 +261,40 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 | 切换思维输出 | 管理 | 否 | 群聊 | 无 | 切换是否输出AI的思维过程的开关(需模型支持) | | 设置主动回复概率 | 管理 | 否 | 群聊 | 主动回复概率 | 主动回复概率需为 [0, 1] 的浮点数,0为完全关闭主动回复 | +### 私聊指令表 + +以下指令仅在启用私聊功能(`LLMCHAT__ENABLE_PRIVATE_CHAT=true`)后可用,这些指令均只对发送者的私聊生效。 + +| 指令 | 权限 | 参数 | 说明 | +|:-----:|:----:|:----:|:----:| +| 私聊API预设 | 主人 | [预设名] | 查看或修改私聊使用的API预设 | +| 私聊修改设定 | 主人 | 设定 | 修改私聊机器人的设定 | +| 私聊记忆清除 | 主人 | 无 | 清除私聊的机器人记忆 | +| 私聊切换思维输出 | 主人 | 无 | 切换是否输出私聊AI的思维过程的开关(需模型支持) | + +**私聊功能说明:** + +- 私聊消息默认触发回复(无需@或随机触发) +- 私聊和群聊的对话记忆独立管理 +- OneBot群操作工具(如禁言、撤回等)在私聊中不可用 + +## 📝 私聊功能启用示例 + +在 `.env` 文件中添加以下配置以启用私聊功能: + +```bash +LLMCHAT__ENABLE_PRIVATE_CHAT=true +LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" +``` + +然后你可以在私聊中与机器人交互。使用以下命令管理私聊: + +- 切换预设:`私聊API预设 aliyun-deepseek-v3` +- 清除记忆:`私聊记忆清除` +- 修改设定:`私聊修改设定 你是一个有趣的AI助手` +| 切换思维输出 | 管理 | 否 | 群聊 | 无 | 切换是否输出AI的思维过程的开关(需模型支持) | +| 设置主动回复概率 | 管理 | 否 | 群聊 | 主动回复概率 | 主动回复概率需为 [0, 1] 的浮点数,0为完全关闭主动回复 | + ### 效果图 ![](img/mcp_demo.jpg) ![](img/demo.png) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index acfa403..caac2a8 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -8,7 +8,7 @@ import random import re import ssl import time -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Union import aiofiles import httpx @@ -21,7 +21,7 @@ from nonebot import ( on_message, require, ) -from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment +from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment, PrivateMessageEvent from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER from nonebot.params import CommandArg from nonebot.permission import SUPERUSER @@ -86,16 +86,36 @@ class GroupState: self.last_active = time.time() self.past_events = deque(maxlen=plugin_config.past_events_size) self.group_prompt: str | None = None + self.user_prompt: str | None = None self.output_reasoning_content = False self.random_trigger_prob = plugin_config.random_trigger_prob +# 初始化私聊状态 +class PrivateChatState: + def __init__(self): + self.preset_name = plugin_config.private_chat_preset + self.history = deque(maxlen=plugin_config.history_size * 2) + self.queue = asyncio.Queue() + self.processing = False + self.last_active = time.time() + self.past_events = deque(maxlen=plugin_config.past_events_size) + self.group_prompt: str | None = None + self.user_prompt: str | None = None + self.output_reasoning_content = False + + group_states: dict[int, GroupState] = defaultdict(GroupState) +private_chat_states: dict[int, PrivateChatState] = defaultdict(PrivateChatState) # 获取当前预设配置 -def get_preset(group_id: int) -> PresetConfig: - state = group_states[group_id] +def get_preset(context_id: int, is_group: bool = True) -> PresetConfig: + if is_group: + state = group_states[context_id] + 
else: + state = private_chat_states[context_id] + for preset in plugin_config.api_presets: if preset.name == state.preset_name: return preset @@ -103,12 +123,12 @@ def get_preset(group_id: int) -> PresetConfig: # 消息格式转换 -def format_message(event: GroupMessageEvent) -> str: +def format_message(event: Union[GroupMessageEvent, PrivateMessageEvent]) -> str: text_message = "" - if event.reply is not None: + if isinstance(event, GroupMessageEvent) and event.reply is not None: text_message += f"[回复 {event.reply.sender.nickname} 的消息 {event.reply.message.extract_plain_text()}]\n" - if event.is_tome(): + if isinstance(event, GroupMessageEvent) and event.is_tome(): text_message += f"@{next(iter(driver.config.nickname))} " for msgseg in event.get_message(): @@ -123,13 +143,22 @@ def format_message(event: GroupMessageEvent) -> str: elif msgseg.type == "text": text_message += msgseg.data.get("text", "") - message = { - "SenderNickname": str(event.sender.card or event.sender.nickname), - "SenderUserId": str(event.user_id), - "Message": text_message, - "MessageID": event.message_id, - "SendTime": datetime.fromtimestamp(event.time).isoformat(), - } + if isinstance(event, GroupMessageEvent): + message = { + "SenderNickname": str(event.sender.card or event.sender.nickname), + "SenderUserId": str(event.user_id), + "Message": text_message, + "MessageID": event.message_id, + "SendTime": datetime.fromtimestamp(event.time).isoformat(), + } + else: # PrivateMessageEvent + message = { + "SenderNickname": str(event.sender.nickname), + "SenderUserId": str(event.user_id), + "Message": text_message, + "MessageID": event.message_id, + "SendTime": datetime.fromtimestamp(event.time).isoformat(), + } return json.dumps(message, ensure_ascii=False) @@ -157,32 +186,60 @@ def build_reasoning_forward_nodes(self_id: str, reasoning_content: str): return nodes -async def is_triggered(event: GroupMessageEvent) -> bool: +async def is_triggered(event: Union[GroupMessageEvent, PrivateMessageEvent]) -> bool: """扩展后的消息处理规则""" - state = group_states[event.group_id] + if isinstance(event, GroupMessageEvent): + state = group_states[event.group_id] - if state.preset_name == "off": - return False - - # 黑名单用户 - if event.user_id in plugin_config.blacklist_user_ids: - return False - - # 忽略特定前缀的消息 - msg_text = event.get_plaintext().strip() - for prefix in plugin_config.ignore_prefixes: - if msg_text.startswith(prefix): + if state.preset_name == "off": return False - state.past_events.append(event) + # 黑名单用户 + if event.user_id in plugin_config.blacklist_user_ids: + return False - # 原有@触发条件 - if event.is_tome(): - return True + # 忽略特定前缀的消息 + msg_text = event.get_plaintext().strip() + for prefix in plugin_config.ignore_prefixes: + if msg_text.startswith(prefix): + return False - # 随机触发条件 - if random.random() < state.random_trigger_prob: + state.past_events.append(event) + + # 原有@触发条件 + if event.is_tome(): + return True + + # 随机触发条件 + if random.random() < state.random_trigger_prob: + return True + + return False + + elif isinstance(event, PrivateMessageEvent): + # 检查私聊功能是否启用 + if not plugin_config.enable_private_chat: + return False + + state = private_chat_states[event.user_id] + + if state.preset_name == "off": + return False + + # 黑名单用户 + if event.user_id in plugin_config.blacklist_user_ids: + return False + + # 忽略特定前缀的消息 + msg_text = event.get_plaintext().strip() + for prefix in plugin_config.ignore_prefixes: + if msg_text.startswith(prefix): + return False + + state.past_events.append(event) + + # 私聊默认触发 return True return False @@ -197,18 
+254,27 @@ handler = on_message( @handler.handle() -async def handle_message(event: GroupMessageEvent): - group_id = event.group_id - logger.debug( - f"收到群聊消息 群号:{group_id} 用户:{event.user_id} 内容:{event.get_plaintext()}" - ) - - state = group_states[group_id] +async def handle_message(event: Union[GroupMessageEvent, PrivateMessageEvent]): + if isinstance(event, GroupMessageEvent): + group_id = event.group_id + logger.debug( + f"收到群聊消息 群号:{group_id} 用户:{event.user_id} 内容:{event.get_plaintext()}" + ) + state = group_states[group_id] + context_id = group_id + else: # PrivateMessageEvent + user_id = event.user_id + logger.debug( + f"收到私聊消息 用户:{user_id} 内容:{event.get_plaintext()}" + ) + state = private_chat_states[user_id] + context_id = user_id await state.queue.put(event) if not state.processing: state.processing = True - task = asyncio.create_task(process_messages(group_id)) + is_group = isinstance(event, GroupMessageEvent) + task = asyncio.create_task(process_messages(context_id, is_group)) task.add_done_callback(tasks.discard) tasks.add(task) @@ -253,9 +319,16 @@ async def send_split_messages(message_handler, content: str): logger.debug(f"发送消息分段 内容:{segment[:50]}...") # 只记录前50个字符避免日志过大 await message_handler.send(Message(segment)) -async def process_messages(group_id: int): - state = group_states[group_id] - preset = get_preset(group_id) +async def process_messages(context_id: int, is_group: bool = True): + if is_group: + group_id = context_id + state = group_states[group_id] + else: + user_id = context_id + state = private_chat_states[user_id] + group_id = None + + preset = get_preset(context_id, is_group) # 初始化OpenAI客户端 if preset.proxy != "": @@ -273,16 +346,21 @@ async def process_messages(group_id: int): ) logger.info( - f"开始处理群聊消息 群号:{group_id} 当前队列长度:{state.queue.qsize()}" + f"开始处理{'群聊' if is_group else '私聊'}消息 {'群号' if is_group else '用户'}:{context_id} 当前队列长度:{state.queue.qsize()}" ) while not state.queue.empty(): event = await state.queue.get() - logger.debug(f"从队列获取消息 群号:{group_id} 消息ID:{event.message_id}") + if is_group: + logger.debug(f"从队列获取消息 群号:{context_id} 消息ID:{event.message_id}") + group_id = context_id + else: + logger.debug(f"从队列获取消息 用户:{context_id} 消息ID:{event.message_id}") + group_id = None past_events_snapshot = [] mcp_client = MCPClient.get_instance(plugin_config.mcp_servers) try: systemPrompt = f""" -我想要你帮我在群聊中闲聊,大家一般叫你{"、".join(list(driver.config.nickname))},我将会在后面的信息中告诉你每条群聊信息的发送者和发送时间,你可以直接称呼发送者为他对应的昵称。 +我想要你帮我在{"群聊" if is_group else "私聊"}中闲聊,大家一般叫你{"、".join(list(driver.config.nickname))},我将会在后面的信息中告诉你每条{"群聊" if is_group else "私聊"}信息的发送者和发送时间,你可以直接称呼发送者为他对应的昵称。 你的回复需要遵守以下几点规则: - 你可以使用多条消息回复,每两条消息之间使用分隔,前后不需要包含额外的换行和空格。 - 除外,消息中不应该包含其他类似的标记。 @@ -296,7 +374,7 @@ async def process_messages(group_id: int): - 如果你选择完全不回复,你只需要直接输出一个。 - 如果你需要思考的话,你应该思考尽量少,以节省时间。 下面是关于你性格的设定,如果设定中提到让你扮演某个人,或者设定中有提到名字,则优先使用设定中的名字。 -{state.group_prompt or plugin_config.default_prompt} +{(state.group_prompt if is_group else state.user_prompt) or plugin_config.default_prompt} """ if preset.support_mcp: systemPrompt += "你也可以使用一些工具,下面是关于这些工具的额外说明:\n" @@ -382,12 +460,24 @@ async def process_messages(group_id: int): await handler.send(Message(f"正在使用{mcp_client.get_friendly_name(tool_name)}")) # 执行工具调用,传递群组和机器人信息用于QQ工具 - result = await mcp_client.call_tool( - tool_name, - tool_args, - group_id=event.group_id, - bot_id=str(event.self_id) - ) + if is_group: + result = await mcp_client.call_tool( + tool_name, + tool_args, + group_id=event.group_id, + bot_id=str(event.self_id) + ) + else: + # 
私聊时某些工具不可用(如群操作工具),跳过这些工具 + if tool_name.startswith("ob__"): + result = f"私聊不支持{mcp_client.get_friendly_name(tool_name)}工具" + else: + result = await mcp_client.call_tool( + tool_name, + tool_args, + group_id=None, + bot_id=str(event.self_id) + ) new_messages.append({ "role": "tool", @@ -452,7 +542,7 @@ async def process_messages(group_id: int): await handler.send(image_msg) except Exception as e: - logger.opt(exception=e).error(f"API请求失败 群号:{group_id}") + logger.opt(exception=e).error(f"API请求失败 {'群号' if is_group else '用户'}:{context_id}") # 如果在处理过程中出现异常,恢复未处理的消息到state中 state.past_events.extendleft(reversed(past_events_snapshot)) await handler.send(Message(f"服务暂时不可用,请稍后再试\n{e!s}")) @@ -564,12 +654,113 @@ async def handle_think(event: GroupMessageEvent, args: Message = CommandArg()): ) +# region 私聊相关指令 + +# 私聊预设切换命令 +private_preset_handler = on_command( + "私聊API预设", + priority=1, + block=True, + permission=SUPERUSER, +) + + +@private_preset_handler.handle() +async def handle_private_preset(event: PrivateMessageEvent, args: Message = CommandArg()): + if not plugin_config.enable_private_chat: + await private_preset_handler.finish("私聊功能未启用") + + user_id = event.user_id + preset_name = args.extract_plain_text().strip() + + if preset_name == "off": + private_chat_states[user_id].preset_name = preset_name + await private_preset_handler.finish("已关闭llmchat私聊功能") + + available_presets = {p.name for p in plugin_config.api_presets} + if preset_name not in available_presets: + available_presets_str = "\n- ".join(available_presets) + await private_preset_handler.finish( + f"当前API预设:{private_chat_states[user_id].preset_name}\n可用API预设:\n- {available_presets_str}" + ) + + private_chat_states[user_id].preset_name = preset_name + await private_preset_handler.finish(f"已切换至API预设:{preset_name}") + + +# 私聊设定修改命令 +private_edit_preset_handler = on_command( + "私聊修改设定", + priority=1, + block=True, + permission=SUPERUSER, +) + + +@private_edit_preset_handler.handle() +async def handle_private_edit_preset(event: PrivateMessageEvent, args: Message = CommandArg()): + if not plugin_config.enable_private_chat: + await private_edit_preset_handler.finish("私聊功能未启用") + + user_id = event.user_id + user_prompt = args.extract_plain_text().strip() + + private_chat_states[user_id].group_prompt = user_prompt + await private_edit_preset_handler.finish("修改成功") + + +# 私聊记忆清除命令 +private_reset_handler = on_command( + "私聊记忆清除", + priority=1, + block=True, + permission=SUPERUSER, +) + + +@private_reset_handler.handle() +async def handle_private_reset(event: PrivateMessageEvent, args: Message = CommandArg()): + if not plugin_config.enable_private_chat: + await private_reset_handler.finish("私聊功能未启用") + + user_id = event.user_id + + private_chat_states[user_id].past_events.clear() + private_chat_states[user_id].history.clear() + await private_reset_handler.finish("记忆已清空") + + +# 私聊思维输出切换命令 +private_think_handler = on_command( + "私聊切换思维输出", + priority=1, + block=True, + permission=SUPERUSER, +) + + +@private_think_handler.handle() +async def handle_private_think(event: PrivateMessageEvent, args: Message = CommandArg()): + if not plugin_config.enable_private_chat: + await private_think_handler.finish("私聊功能未启用") + + state = private_chat_states[event.user_id] + state.output_reasoning_content = not state.output_reasoning_content + + await private_think_handler.finish( + f"已{(state.output_reasoning_content and '开启') or '关闭'}思维输出" + ) + +# endregion + + # region 持久化与定时任务 # 获取插件数据目录 data_dir = store.get_plugin_data_dir() # 获取插件数据文件 data_file = 
store.get_plugin_data_file("llmchat_state.json") +private_data_file = store.get_plugin_data_file("llmchat_private_state.json") async def save_state(): @@ -590,6 +781,24 @@ async def save_state(): os.makedirs(os.path.dirname(data_file), exist_ok=True) async with aiofiles.open(data_file, "w", encoding="utf8") as f: await f.write(json.dumps(data, ensure_ascii=False)) + + # 保存私聊状态 + if plugin_config.enable_private_chat: + logger.info(f"开始保存私聊状态到文件:{private_data_file}") + private_data = { + uid: { + "preset": state.preset_name, + "history": list(state.history), + "last_active": state.last_active, + "group_prompt": state.group_prompt, + "output_reasoning_content": state.output_reasoning_content, + } + for uid, state in private_chat_states.items() + } + + os.makedirs(os.path.dirname(private_data_file), exist_ok=True) + async with aiofiles.open(private_data_file, "w", encoding="utf8") as f: + await f.write(json.dumps(private_data, ensure_ascii=False)) async def load_state(): @@ -611,6 +820,23 @@ async def load_state(): state.output_reasoning_content = state_data["output_reasoning_content"] state.random_trigger_prob = state_data.get("random_trigger_prob", plugin_config.random_trigger_prob) group_states[int(gid)] = state + + # 加载私聊状态 + if plugin_config.enable_private_chat: + logger.info(f"从文件加载私聊状态:{private_data_file}") + if os.path.exists(private_data_file): + async with aiofiles.open(private_data_file, encoding="utf8") as f: + private_data = json.loads(await f.read()) + for uid, state_data in private_data.items(): + state = PrivateChatState() + state.preset_name = state_data["preset"] + state.history = deque( + state_data["history"], maxlen=plugin_config.history_size * 2 + ) + state.last_active = state_data["last_active"] + state.group_prompt = state_data["group_prompt"] + state.output_reasoning_content = state_data["output_reasoning_content"] + private_chat_states[int(uid)] = state # 注册生命周期事件 diff --git a/nonebot_plugin_llmchat/config.py b/nonebot_plugin_llmchat/config.py index ed88dd2..6ecebf1 100755 --- a/nonebot_plugin_llmchat/config.py +++ b/nonebot_plugin_llmchat/config.py @@ -49,6 +49,8 @@ class ScopedConfig(BaseModel): default_factory=list, description="需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理" ) + enable_private_chat: bool = Field(False, description="是否启用私聊功能") + private_chat_preset: str = Field("off", description="私聊默认使用的预设名称") class Config(BaseModel): From 2fecb746b3be7186abf9e78aa559c19c8c9600ad Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 11:20:33 +0800 Subject: [PATCH 10/24] =?UTF-8?q?=E8=A7=A6=E5=8F=91=E6=A3=80=E6=9F=A5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- PRIVATE_CHAT_CHANGES.md | 9 --------- 1 file changed, 9 deletions(-) diff --git a/PRIVATE_CHAT_CHANGES.md b/PRIVATE_CHAT_CHANGES.md index 701ed86..131ec9d 100644 --- a/PRIVATE_CHAT_CHANGES.md +++ b/PRIVATE_CHAT_CHANGES.md @@ -171,13 +171,4 @@ LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" 5. 
**独立预设** - 私聊和群聊可以使用不同的API预设 ---- - -## ✨ 后续改进建议 - -- [ ] 支持多用户私聊会话管理面板 -- [ ] 添加私聊消息转发到管理员功能 -- [ ] 实现私聊速率限制 -- [ ] 添加私聊用户黑名单 -- [ ] 支持私聊消息加密存储 From 2c04afc86ae982ae98104a7618858c5f27b3326f Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 11:27:41 +0800 Subject: [PATCH 11/24] =?UTF-8?q?=E5=B0=9D=E8=AF=95=E8=A7=A3=E5=86=B3Ruff?= =?UTF-8?q?=20Lint=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 50 +++++++++++++++++------------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index caac2a8..68d97c3 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -8,7 +8,7 @@ import random import re import ssl import time -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING import aiofiles import httpx @@ -123,7 +123,7 @@ def get_preset(context_id: int, is_group: bool = True) -> PresetConfig: # 消息格式转换 -def format_message(event: Union[GroupMessageEvent, PrivateMessageEvent]) -> str: +def format_message(event: GroupMessageEvent | PrivateMessageEvent) -> str: text_message = "" if isinstance(event, GroupMessageEvent) and event.reply is not None: text_message += f"[回复 {event.reply.sender.nickname} 的消息 {event.reply.message.extract_plain_text()}]\n" @@ -186,7 +186,7 @@ def build_reasoning_forward_nodes(self_id: str, reasoning_content: str): return nodes -async def is_triggered(event: Union[GroupMessageEvent, PrivateMessageEvent]) -> bool: +async def is_triggered(event: GroupMessageEvent | PrivateMessageEvent) -> bool: """扩展后的消息处理规则""" if isinstance(event, GroupMessageEvent): @@ -254,7 +254,7 @@ handler = on_message( @handler.handle() -async def handle_message(event: Union[GroupMessageEvent, PrivateMessageEvent]): +async def handle_message(event: GroupMessageEvent | PrivateMessageEvent): if isinstance(event, GroupMessageEvent): group_id = event.group_id logger.debug( @@ -359,23 +359,31 @@ async def process_messages(context_id: int, is_group: bool = True): past_events_snapshot = [] mcp_client = MCPClient.get_instance(plugin_config.mcp_servers) try: - systemPrompt = f""" -我想要你帮我在{"群聊" if is_group else "私聊"}中闲聊,大家一般叫你{"、".join(list(driver.config.nickname))},我将会在后面的信息中告诉你每条{"群聊" if is_group else "私聊"}信息的发送者和发送时间,你可以直接称呼发送者为他对应的昵称。 -你的回复需要遵守以下几点规则: -- 你可以使用多条消息回复,每两条消息之间使用分隔,前后不需要包含额外的换行和空格。 -- 除外,消息中不应该包含其他类似的标记。 -- 不要使用markdown或者html,聊天软件不支持解析,换行请用换行符。 -- 你应该以普通人的方式发送消息,每条消息字数要尽量少一些,应该倾向于使用更多条的消息回复。 -- 代码则不需要分段,用单独的一条消息发送。 -- 请使用发送者的昵称称呼发送者,你可以礼貌地问候发送者,但只需要在第一次回答这位发送者的问题时问候他。 -- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)],也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。 -- 你有引用某条消息的能力,使用[CQ:reply,id=(消息id)]来引用。 -- 如果有多条消息,你应该优先回复提到你的,一段时间之前的就不要回复了,也可以直接选择不回复。 -- 如果你选择完全不回复,你只需要直接输出一个。 -- 如果你需要思考的话,你应该思考尽量少,以节省时间。 -下面是关于你性格的设定,如果设定中提到让你扮演某个人,或者设定中有提到名字,则优先使用设定中的名字。 -{(state.group_prompt if is_group else state.user_prompt) or plugin_config.default_prompt} -""" + # 构建系统提示,分成多行以满足行长限制 + chat_type = "群聊" if is_group else "私聊" + bot_names = "、".join(list(driver.config.nickname)) + default_prompt = (state.group_prompt if is_group else state.user_prompt) or plugin_config.default_prompt + + system_lines = [ + f"我想要你帮我在{chat_type}中闲聊,大家一般叫你{bot_names}。", + "我将会在后面的信息中告诉你每条信息的发送者和发送时间,你可以直接称呼发送者为他对应的昵称。", + "你的回复需要遵守以下几点规则:", + "- 你可以使用多条消息回复,每两条消息之间使用分隔,前后不需要包含额外的换行和空格。", + "- 除外,消息中不应该包含其他类似的标记。", + "- 不要使用markdown或者html,聊天软件不支持解析,换行请用换行符。", + "- 
你应该以普通人的方式发送消息,每条消息字数要尽量少一些,应该倾向于使用更多条的消息回复。", + "- 代码则不需要分段,用单独的一条消息发送。", + "- 请使用发送者的昵称称呼发送者,你可以礼貌地问候发送者,但只需要在第一次回答这位发送者的问题时问候他。", + "- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)],也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。", + "- 你有引用某条消息的能力,使用[CQ:reply,id=(消息id)]来引用。", + "- 如果有多条消息,你应该优先回复提到你的,一段时间之前的就不要回复了,也可以直接选择不回复。", + "- 如果你选择完全不回复,你只需要直接输出一个。", + "- 如果你需要思考的话,你应该尽量少思考,以节省时间。", + "下面是关于你性格的设定,如果设定中提到让你扮演某个人,或者设定中有提到名字,则优先使用设定中的名字。", + default_prompt, + ] + + systemPrompt = "\n".join(system_lines) if preset.support_mcp: systemPrompt += "你也可以使用一些工具,下面是关于这些工具的额外说明:\n" for mcp_name, mcp_config in plugin_config.mcp_servers.items(): From 7ea7a26681160704507abe9108455f119d052d3f Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 14:45:25 +0800 Subject: [PATCH 12/24] =?UTF-8?q?=E5=8F=96=E6=B6=88=E8=B7=9F=E8=B8=AA.DS?= =?UTF-8?q?=5FStore?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index dcd5a0a..83ace41 100644 --- a/.gitignore +++ b/.gitignore @@ -174,3 +174,4 @@ pyrightconfig.json !.vscode/launch.json !.vscode/extensions.json !.vscode/*.code-snippets +.DS_Store From a8d3213e485c1481702276bada332ecc33ea27b5 Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 14:52:44 +0800 Subject: [PATCH 13/24] Update Poerty --- poetry.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 50d98ce..dcadb71 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -662,14 +662,14 @@ typing-extensions = ">=4.0.0,<5.0.0" [[package]] name = "nonebot2" -version = "2.4.1" +version = "2.4.4" description = "An asynchronous python bot framework." 
optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "nonebot2-2.4.1-py3-none-any.whl", hash = "sha256:fec95f075efc89dbe9ce148618b413b02f46ba284200367749b035e794695111"}, - {file = "nonebot2-2.4.1.tar.gz", hash = "sha256:8fea364318501ed79721403a8ecd76587bc884d58c356260f691a8bbda9b05e6"}, + {file = "nonebot2-2.4.4-py3-none-any.whl", hash = "sha256:8885d02906f1def83c138f298a7aa99ca1975351f44d8d290ea0eeec5aec1f0b"}, + {file = "nonebot2-2.4.4.tar.gz", hash = "sha256:b367c17f31ae0d548e374bb80b719ed12885620f29f3cbc305a5a88a6175f4e3"}, ] [package.dependencies] @@ -679,17 +679,17 @@ loguru = ">=0.6.0,<1.0.0" pydantic = ">=1.10.0,<2.5.0 || >2.5.0,<2.5.1 || >2.5.1,<2.10.0 || >2.10.0,<2.10.1 || >2.10.1,<3.0.0" pygtrie = ">=2.4.1,<3.0.0" python-dotenv = ">=0.21.0,<2.0.0" -tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.4.0,<5.0.0" +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_full_version < \"3.11.0\""} +typing-extensions = ">=4.6.0,<5.0.0" yarl = ">=1.7.2,<2.0.0" [package.extras] aiohttp = ["aiohttp[speedups] (>=3.11.0,<4.0.0)"] -all = ["Quart (>=0.18.0,<1.0.0)", "aiohttp[speedups] (>=3.11.0,<4.0.0)", "fastapi (>=0.93.0,<1.0.0)", "httpx[http2] (>=0.26.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)", "websockets (>=10.0)"] +all = ["aiohttp[speedups] (>=3.11.0,<4.0.0)", "fastapi (>=0.93.0,<1.0.0)", "httpx[http2] (>=0.26.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)", "websockets (>=15.0)"] fastapi = ["fastapi (>=0.93.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"] httpx = ["httpx[http2] (>=0.26.0,<1.0.0)"] -quart = ["Quart (>=0.18.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"] -websockets = ["websockets (>=10.0)"] +quart = ["quart (>=0.18.0,<1.0.0)", "uvicorn[standard] (>=0.20.0,<1.0.0)"] +websockets = ["websockets (>=15.0)"] [[package]] name = "nonemoji" From e293b05fa12cfb102e564ea15b128d3d3a96686e Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 15:11:49 +0800 Subject: [PATCH 14/24] =?UTF-8?q?=E7=A7=81=E8=81=8A=E5=88=9D=E6=AD=A5?= =?UTF-8?q?=E6=B5=8B=E8=AF=95=E9=80=9A=E8=BF=87?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- PRIVATE_CHAT_CHANGES.md | 174 ---------------------------------------- 1 file changed, 174 deletions(-) delete mode 100644 PRIVATE_CHAT_CHANGES.md diff --git a/PRIVATE_CHAT_CHANGES.md b/PRIVATE_CHAT_CHANGES.md deleted file mode 100644 index 131ec9d..0000000 --- a/PRIVATE_CHAT_CHANGES.md +++ /dev/null @@ -1,174 +0,0 @@ -# 私聊功能实现总结 - -## 📝 概览 - -已成功为 nonebot-plugin-llmchat 项目添加了完整的私聊功能支持。用户现在可以在私聊中与机器人进行对话,同时保持群聊功能完全不变。 - ---- - -## 🔧 主要改动 - -### 1. **config.py** - 配置模块 - -#### 新增配置项: -- `LLMCHAT__ENABLE_PRIVATE_CHAT` (bool, 默认值: False) - - 是否启用私聊功能 - -- `LLMCHAT__PRIVATE_CHAT_PRESET` (str, 默认值: "off") - - 私聊默认使用的预设名称 - -### 2. **__init__.py** - 主程序模块 - -#### 新增导入: -```python -from typing import Union -from nonebot.adapters.onebot.v11 import PrivateMessageEvent -``` - -#### 新增数据结构: - -**PrivateChatState 类** -- 用于管理每个用户的私聊状态 -- 结构与 GroupState 类似,但针对单个用户独立管理 -- 包含:preset_name、history、queue、processing 等属性 - -**private_chat_states 字典** -- 类型:`dict[int, PrivateChatState]` -- 按用户ID存储私聊状态 - -#### 修改的函数: - -1. **format_message()** - - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` - - 支持两种消息事件类型的格式化 - -2. **is_triggered()** - - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` - - 新增私聊事件检测逻辑 - - 私聊消息在启用且预设不为"off"时自动触发 - -3. 
**get_preset()** - - 新增参数:`is_group: bool = True` - - 支持从群组或私聊状态获取预设配置 - -4. **process_messages()** - - 新增参数:`context_id: int, is_group: bool = True` - - 支持处理群组和私聊消息 - - 私聊时跳过OneBot群操作工具(ob__开头的工具) - -5. **handle_message()** - - 参数改为:`event: Union[GroupMessageEvent, PrivateMessageEvent]` - - 支持路由到不同的处理逻辑 - -6. **save_state()** / **load_state()** - - 新增私聊状态的持久化 - - 私聊状态保存到单独的文件:`llmchat_private_state.json` - -#### 新增命令处理器(私聊相关): - -所有私聊命令需要主人权限,且仅在启用私聊功能时可用: - -1. **私聊API预设** - - 查看或修改私聊使用的API预设 - - 用法:`私聊API预设 [预设名]` - -2. **私聊修改设定** - - 修改私聊机器人的性格设定 - - 用法:`私聊修改设定 [新设定]` - -3. **私聊记忆清除** - - 清除私聊的对话历史记录 - - 用法:`私聊记忆清除` - -4. **私聊切换思维输出** - - 切换是否输出AI的思维过程 - - 用法:`私聊切换思维输出` - -### 3. **README.md** - 文档更新 - -#### 更新的章节: - -1. **项目介绍** - - 更新标题为"群聊&私聊的AI对话插件" - - 添加"群聊和私聊支持"功能说明 - -2. **配置表格** - - 添加两个新配置项的说明 - -3. **使用指南** - - 将原"指令表"改名为"群聊指令表" - - 新增"私聊指令表" - - 添加"私聊功能启用示例"部分 - ---- - -## 🚀 使用指南 - -### 启用私聊功能 - -在 `.env` 文件中添加: - -```bash -# 启用私聊功能 -LLMCHAT__ENABLE_PRIVATE_CHAT=true - -# 设置私聊默认预设 -LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" -``` - -### 私聊命令示例 - -``` -# 主人私聊机器人 - -私聊API预设 # 查看当前预设 -私聊API预设 aliyun-deepseek-v3 # 切换预设 - -私聊修改设定 你是一个有趣的AI # 修改性格设定 - -私聊记忆清除 # 清除对话记忆 - -私聊切换思维输出 # 开关思维过程输出 -``` - ---- - -## 🔑 关键特性 - -✅ **独立管理** - 群聊和私聊拥有完全独立的对话记忆和配置 - -✅ **灵活控制** - 可单独启用/禁用私聊功能,无需影响群聊 - -✅ **自动触发** - 私聊消息自动触发回复,无需@机器人 - -✅ **权限隔离** - 私聊命令仅主人可用 - -✅ **工具适配** - 私聊时自动跳过不适用的群操作工具 - -✅ **状态持久化** - 私聊状态独立保存和恢复 - ---- - -## 📊 文件对比 - -| 文件 | 变更类型 | 主要改动 | -|------|--------|--------| -| config.py | 修改 | 新增2个配置项 | -| __init__.py | 修改 | 新增私聊类、处理器、命令 | -| README.md | 修改 | 更新文档说明 | - ---- - -## ⚠️ 注意事项 - -1. **默认禁用** - 私聊功能默认为禁用状态,需要在配置文件中显式启用 - -2. **群操作工具** - OneBot群操作工具(禁言、撤回等)在私聊中不可用 - -3. **状态文件** - 私聊状态存储在 `llmchat_private_state.json` 文件中 - -4. **权限限制** - 所有私聊命令都需要主人权限 - -5. 
**独立预设** - 私聊和群聊可以使用不同的API预设 - - From 3089bb51ae6d8416c34ec226e764a372b076cf14 Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 15:14:44 +0800 Subject: [PATCH 15/24] =?UTF-8?q?=E4=BF=AE=E6=AD=A3ruff=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 36 +++++++++++++++++------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 68d97c3..a6246f8 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -115,7 +115,7 @@ def get_preset(context_id: int, is_group: bool = True) -> PresetConfig: state = group_states[context_id] else: state = private_chat_states[context_id] - + for preset in plugin_config.api_presets: if preset.name == state.preset_name: return preset @@ -216,14 +216,14 @@ async def is_triggered(event: GroupMessageEvent | PrivateMessageEvent) -> bool: return True return False - + elif isinstance(event, PrivateMessageEvent): # 检查私聊功能是否启用 if not plugin_config.enable_private_chat: return False - + state = private_chat_states[event.user_id] - + if state.preset_name == "off": return False @@ -238,7 +238,7 @@ async def is_triggered(event: GroupMessageEvent | PrivateMessageEvent) -> bool: return False state.past_events.append(event) - + # 私聊默认触发 return True @@ -327,7 +327,7 @@ async def process_messages(context_id: int, is_group: bool = True): user_id = context_id state = private_chat_states[user_id] group_id = None - + preset = get_preset(context_id, is_group) # 初始化OpenAI客户端 @@ -345,8 +345,10 @@ async def process_messages(context_id: int, is_group: bool = True): timeout=plugin_config.request_timeout, ) + chat_type = "群聊" if is_group else "私聊" + context_type = "群号" if is_group else "用户" logger.info( - f"开始处理{'群聊' if is_group else '私聊'}消息 {'群号' if is_group else '用户'}:{context_id} 当前队列长度:{state.queue.qsize()}" + f"开始处理{chat_type}消息 {context_type}:{context_id} 当前队列长度:{state.queue.qsize()}" ) while not state.queue.empty(): event = await state.queue.get() @@ -373,8 +375,10 @@ async def process_messages(context_id: int, is_group: bool = True): "- 不要使用markdown或者html,聊天软件不支持解析,换行请用换行符。", "- 你应该以普通人的方式发送消息,每条消息字数要尽量少一些,应该倾向于使用更多条的消息回复。", "- 代码则不需要分段,用单独的一条消息发送。", - "- 请使用发送者的昵称称呼发送者,你可以礼貌地问候发送者,但只需要在第一次回答这位发送者的问题时问候他。", - "- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)],也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。", + "- 请使用发送者的昵称称呼发送者,你可以礼貌地问候发送者,但只需要在" + "第一次回答这位发送者的问题时问候他。", + "- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)]," + "也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。", "- 你有引用某条消息的能力,使用[CQ:reply,id=(消息id)]来引用。", "- 如果有多条消息,你应该优先回复提到你的,一段时间之前的就不要回复了,也可以直接选择不回复。", "- 如果你选择完全不回复,你只需要直接输出一个。", @@ -677,7 +681,7 @@ private_preset_handler = on_command( async def handle_private_preset(event: PrivateMessageEvent, args: Message = CommandArg()): if not plugin_config.enable_private_chat: await private_preset_handler.finish("私聊功能未启用") - + user_id = event.user_id preset_name = args.extract_plain_text().strip() @@ -709,7 +713,7 @@ private_edit_preset_handler = on_command( async def handle_private_edit_preset(event: PrivateMessageEvent, args: Message = CommandArg()): if not plugin_config.enable_private_chat: await private_edit_preset_handler.finish("私聊功能未启用") - + user_id = event.user_id user_prompt = args.extract_plain_text().strip() @@ -730,7 +734,7 @@ private_reset_handler = on_command( async def handle_private_reset(event: PrivateMessageEvent, args: Message = CommandArg()): if not 
plugin_config.enable_private_chat: await private_reset_handler.finish("私聊功能未启用") - + user_id = event.user_id private_chat_states[user_id].past_events.clear() @@ -751,7 +755,7 @@ private_think_handler = on_command( async def handle_private_think(event: PrivateMessageEvent, args: Message = CommandArg()): if not plugin_config.enable_private_chat: await private_think_handler.finish("私聊功能未启用") - + state = private_chat_states[event.user_id] state.output_reasoning_content = not state.output_reasoning_content @@ -789,7 +793,7 @@ async def save_state(): os.makedirs(os.path.dirname(data_file), exist_ok=True) async with aiofiles.open(data_file, "w", encoding="utf8") as f: await f.write(json.dumps(data, ensure_ascii=False)) - + # 保存私聊状态 if plugin_config.enable_private_chat: logger.info(f"开始保存私聊状态到文件:{private_data_file}") @@ -803,7 +807,7 @@ async def save_state(): } for uid, state in private_chat_states.items() } - + os.makedirs(os.path.dirname(private_data_file), exist_ok=True) async with aiofiles.open(private_data_file, "w", encoding="utf8") as f: await f.write(json.dumps(private_data, ensure_ascii=False)) @@ -828,7 +832,7 @@ async def load_state(): state.output_reasoning_content = state_data["output_reasoning_content"] state.random_trigger_prob = state_data.get("random_trigger_prob", plugin_config.random_trigger_prob) group_states[int(gid)] = state - + # 加载私聊状态 if plugin_config.enable_private_chat: logger.info(f"从文件加载私聊状态:{private_data_file}") From 53f3f185e73c00e998690abe9f035705f1fab9ef Mon Sep 17 00:00:00 2001 From: KawakazeNotFound Date: Thu, 6 Nov 2025 16:33:41 +0800 Subject: [PATCH 16/24] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E5=9C=A8=E9=95=BF?= =?UTF-8?q?=E6=96=87=E6=9C=AC=E4=B8=8B=E5=AE=B9=E6=98=93=E8=A7=A6=E5=8F=91?= =?UTF-8?q?tool=5Fcall=E9=94=99=E8=AF=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index a6246f8..947c3b3 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -453,7 +453,7 @@ async def process_messages(context_id: int, is_group: bool = True): message = response.choices[0].message # 处理响应并处理工具调用 - while preset.support_mcp and message.tool_calls: + while preset.support_mcp and message and message.tool_calls: new_messages.append({ "role": "assistant", "tool_calls": [tool_call.model_dump() for tool_call in message.tool_calls] @@ -505,11 +505,17 @@ async def process_messages(context_id: int, is_group: bool = True): message = response.choices[0].message + # 安全检查:确保 message 不为 None + if not message: + logger.error("API 响应中的 message 为 None") + await handler.send(Message("服务暂时不可用,请稍后再试")) + return + reply, matched_reasoning_content = pop_reasoning_content( - response.choices[0].message.content + message.content ) reasoning_content: str | None = ( - getattr(response.choices[0].message, "reasoning_content", None) + getattr(message, "reasoning_content", None) or matched_reasoning_content ) @@ -518,7 +524,7 @@ async def process_messages(context_id: int, is_group: bool = True): "content": reply, } - reply_images = getattr(response.choices[0].message, "images", None) + reply_images = getattr(message, "images", None) if reply_images: # openai的sdk里的assistant消息暂时没有images字段,需要单独处理 From 1f41ed084ebf19d7ca7bedf7b0107ca53f05262d Mon Sep 17 00:00:00 2001 From: FuQuan <87348379+FuQuan233@users.noreply.github.com> Date: Fri, 7 Nov 
2025 11:56:34 +0800 Subject: [PATCH 17/24] =?UTF-8?q?=E2=9C=A8=20=E5=90=88=E5=B9=B6=E7=A7=81?= =?UTF-8?q?=E8=81=8A=E6=8C=87=E4=BB=A4=EF=BC=8C=E6=9B=B4=E6=96=B0=E7=9B=B8?= =?UTF-8?q?=E5=85=B3=E6=9D=83=E9=99=90=E5=92=8C=E7=8A=B6=E6=80=81=E7=AE=A1?= =?UTF-8?q?=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 225 ++++++++++------------------- 1 file changed, 75 insertions(+), 150 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 947c3b3..32dbd5e 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -22,7 +22,7 @@ from nonebot import ( require, ) from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment, PrivateMessageEvent -from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER +from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER, PRIVATE from nonebot.params import CommandArg from nonebot.permission import SUPERUSER from nonebot.plugin import PluginMetadata @@ -377,12 +377,19 @@ async def process_messages(context_id: int, is_group: bool = True): "- 代码则不需要分段,用单独的一条消息发送。", "- 请使用发送者的昵称称呼发送者,你可以礼貌地问候发送者,但只需要在" "第一次回答这位发送者的问题时问候他。", - "- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)]," - "也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。", "- 你有引用某条消息的能力,使用[CQ:reply,id=(消息id)]来引用。", "- 如果有多条消息,你应该优先回复提到你的,一段时间之前的就不要回复了,也可以直接选择不回复。", "- 如果你选择完全不回复,你只需要直接输出一个。", "- 如果你需要思考的话,你应该尽量少思考,以节省时间。", + ] + + if is_group: + system_lines += [ + "- 你有at群成员的能力,只需要在某条消息中插入[CQ:at,qq=(QQ号)]," + "也就是CQ码。at发送者是非必要的,你可以根据你自己的想法at某个人。", + ] + + system_lines += [ "下面是关于你性格的设定,如果设定中提到让你扮演某个人,或者设定中有提到名字,则优先使用设定中的名字。", default_prompt, ] @@ -439,7 +446,7 @@ async def process_messages(context_id: int, is_group: bool = True): } if preset.support_mcp: - available_tools = await mcp_client.get_available_tools() + available_tools = await mcp_client.get_available_tools(is_group) client_config["tools"] = available_tools response = await client.chat.completions.create( @@ -471,25 +478,12 @@ async def process_messages(context_id: int, is_group: bool = True): # 发送工具调用提示 await handler.send(Message(f"正在使用{mcp_client.get_friendly_name(tool_name)}")) - # 执行工具调用,传递群组和机器人信息用于QQ工具 - if is_group: - result = await mcp_client.call_tool( - tool_name, - tool_args, - group_id=event.group_id, - bot_id=str(event.self_id) - ) - else: - # 私聊时某些工具不可用(如群操作工具),跳过这些工具 - if tool_name.startswith("ob__"): - result = f"私聊不支持{mcp_client.get_friendly_name(tool_name)}工具" - else: - result = await mcp_client.call_tool( - tool_name, - tool_args, - group_id=None, - bot_id=str(event.self_id) - ) + result = await mcp_client.call_tool( + tool_name, + tool_args, + group_id=event.group_id, + bot_id=str(event.self_id) + ) new_messages.append({ "role": "tool", @@ -576,22 +570,33 @@ preset_handler = on_command("API预设", priority=1, block=True, permission=SUPE @preset_handler.handle() -async def handle_preset(event: GroupMessageEvent, args: Message = CommandArg()): - group_id = event.group_id +async def handle_preset(event: GroupMessageEvent | PrivateMessageEvent, args: Message = CommandArg()): + if isinstance(event, GroupMessageEvent): + context_id = event.group_id + state = group_states[context_id] + else: # PrivateMessageEvent + if not plugin_config.enable_private_chat: + return + context_id = event.user_id + state = private_chat_states[context_id] + preset_name = args.extract_plain_text().strip() if preset_name == "off": - 
group_states[group_id].preset_name = preset_name - await preset_handler.finish("已关闭llmchat") + state.preset_name = preset_name + if isinstance(event, GroupMessageEvent): + await preset_handler.finish("已关闭llmchat群聊功能") + else: + await preset_handler.finish("已关闭llmchat私聊功能") available_presets = {p.name for p in plugin_config.api_presets} if preset_name not in available_presets: available_presets_str = "\n- ".join(available_presets) await preset_handler.finish( - f"当前API预设:{group_states[group_id].preset_name}\n可用API预设:\n- {available_presets_str}" + f"当前API预设:{state.preset_name}\n可用API预设:\n- {available_presets_str}" ) - group_states[group_id].preset_name = preset_name + state.preset_name = preset_name await preset_handler.finish(f"已切换至API预设:{preset_name}") @@ -599,16 +604,23 @@ edit_preset_handler = on_command( "修改设定", priority=1, block=True, - permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), + permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER | PRIVATE), ) @edit_preset_handler.handle() -async def handle_edit_preset(event: GroupMessageEvent, args: Message = CommandArg()): - group_id = event.group_id - group_prompt = args.extract_plain_text().strip() +async def handle_edit_preset(event: GroupMessageEvent | PrivateMessageEvent, args: Message = CommandArg()): + if isinstance(event, GroupMessageEvent): + context_id = event.group_id + state = group_states[context_id] + else: # PrivateMessageEvent + if not plugin_config.enable_private_chat: + return + context_id = event.user_id + state = private_chat_states[context_id] - group_states[group_id].group_prompt = group_prompt + group_prompt = args.extract_plain_text().strip() + state.group_prompt = group_prompt await edit_preset_handler.finish("修改成功") @@ -616,16 +628,23 @@ reset_handler = on_command( "记忆清除", priority=1, block=True, - permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), + permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER | PRIVATE), ) @reset_handler.handle() -async def handle_reset(event: GroupMessageEvent, args: Message = CommandArg()): - group_id = event.group_id +async def handle_reset(event: GroupMessageEvent | PrivateMessageEvent, args: Message = CommandArg()): + if isinstance(event, GroupMessageEvent): + context_id = event.group_id + state = group_states[context_id] + else: # PrivateMessageEvent + if not plugin_config.enable_private_chat: + return + context_id = event.user_id + state = private_chat_states[context_id] - group_states[group_id].past_events.clear() - group_states[group_id].history.clear() + state.past_events.clear() + state.history.clear() await reset_handler.finish("记忆已清空") @@ -639,32 +658,38 @@ set_prob_handler = on_command( @set_prob_handler.handle() async def handle_set_prob(event: GroupMessageEvent, args: Message = CommandArg()): - group_id = event.group_id - prob = 0 + context_id = event.group_id + state = group_states[context_id] try: prob = float(args.extract_plain_text().strip()) if prob < 0 or prob > 1: - raise ValueError - except Exception as e: - await reset_handler.finish(f"输入有误,请使用 [0,1] 的浮点数\n{e!s}") + raise ValueError("概率值必须在0-1之间") + except ValueError as e: + await set_prob_handler.finish(f"输入有误,请使用 [0,1] 的浮点数\n{e!s}") - group_states[group_id].random_trigger_prob = prob - await reset_handler.finish(f"主动回复概率已设为 {prob}") + state.random_trigger_prob = prob + await set_prob_handler.finish(f"主动回复概率已设为 {prob}") -# 预设切换命令 +# 思维输出切换命令 think_handler = on_command( "切换思维输出", priority=1, block=True, - permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), + permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER | 
PRIVATE), ) @think_handler.handle() -async def handle_think(event: GroupMessageEvent, args: Message = CommandArg()): - state = group_states[event.group_id] +async def handle_think(event: GroupMessageEvent | PrivateMessageEvent, args: Message = CommandArg()): + if isinstance(event, GroupMessageEvent): + state = group_states[event.group_id] + else: # PrivateMessageEvent + if not plugin_config.enable_private_chat: + return + state = private_chat_states[event.user_id] + state.output_reasoning_content = not state.output_reasoning_content await think_handler.finish( @@ -672,106 +697,6 @@ async def handle_think(event: GroupMessageEvent, args: Message = CommandArg()): ) -# region 私聊相关指令 - -# 私聊预设切换命令 -private_preset_handler = on_command( - "私聊API预设", - priority=1, - block=True, - permission=SUPERUSER, -) - - -@private_preset_handler.handle() -async def handle_private_preset(event: PrivateMessageEvent, args: Message = CommandArg()): - if not plugin_config.enable_private_chat: - await private_preset_handler.finish("私聊功能未启用") - - user_id = event.user_id - preset_name = args.extract_plain_text().strip() - - if preset_name == "off": - private_chat_states[user_id].preset_name = preset_name - await private_preset_handler.finish("已关闭llmchat私聊功能") - - available_presets = {p.name for p in plugin_config.api_presets} - if preset_name not in available_presets: - available_presets_str = "\n- ".join(available_presets) - await private_preset_handler.finish( - f"当前API预设:{private_chat_states[user_id].preset_name}\n可用API预设:\n- {available_presets_str}" - ) - - private_chat_states[user_id].preset_name = preset_name - await private_preset_handler.finish(f"已切换至API预设:{preset_name}") - - -# 私聊设定修改命令 -private_edit_preset_handler = on_command( - "私聊修改设定", - priority=1, - block=True, - permission=SUPERUSER, -) - - -@private_edit_preset_handler.handle() -async def handle_private_edit_preset(event: PrivateMessageEvent, args: Message = CommandArg()): - if not plugin_config.enable_private_chat: - await private_edit_preset_handler.finish("私聊功能未启用") - - user_id = event.user_id - user_prompt = args.extract_plain_text().strip() - - private_chat_states[user_id].group_prompt = user_prompt - await private_edit_preset_handler.finish("修改成功") - - -# 私聊记忆清除命令 -private_reset_handler = on_command( - "私聊记忆清除", - priority=1, - block=True, - permission=SUPERUSER, -) - - -@private_reset_handler.handle() -async def handle_private_reset(event: PrivateMessageEvent, args: Message = CommandArg()): - if not plugin_config.enable_private_chat: - await private_reset_handler.finish("私聊功能未启用") - - user_id = event.user_id - - private_chat_states[user_id].past_events.clear() - private_chat_states[user_id].history.clear() - await private_reset_handler.finish("记忆已清空") - - -# 私聊思维输出切换命令 -private_think_handler = on_command( - "私聊切换思维输出", - priority=1, - block=True, - permission=SUPERUSER, -) - - -@private_think_handler.handle() -async def handle_private_think(event: PrivateMessageEvent, args: Message = CommandArg()): - if not plugin_config.enable_private_chat: - await private_think_handler.finish("私聊功能未启用") - - state = private_chat_states[event.user_id] - state.output_reasoning_content = not state.output_reasoning_content - - await private_think_handler.finish( - f"已{(state.output_reasoning_content and '开启') or '关闭'}思维输出" - ) - -# endregion - - # region 持久化与定时任务 # 获取插件数据目录 From e542deabdb71a86431dc1a9cc43344c9545a1ed7 Mon Sep 17 00:00:00 2001 From: FuQuan <87348379+FuQuan233@users.noreply.github.com> Date: Fri, 7 Nov 2025 11:58:16 +0800 Subject: [PATCH 18/24] 
=?UTF-8?q?=E2=9C=A8=20=E5=90=88=E5=B9=B6=E7=A7=81?= =?UTF-8?q?=E8=81=8A=E6=8C=87=E4=BB=A4=EF=BC=8C=E6=9B=B4=E6=96=B0=E7=9B=B8?= =?UTF-8?q?=E5=85=B3=E6=9D=83=E9=99=90=E5=92=8C=E7=8A=B6=E6=80=81=E7=AE=A1?= =?UTF-8?q?=E7=90=86?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/mcpclient.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 4d38f0a..9a53865 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -106,7 +106,7 @@ class MCPClient: return SessionContext() - async def get_available_tools(self): + async def get_available_tools(self, is_group: bool): """获取可用工具列表,使用缓存机制""" if self._tools_cache is not None: logger.debug("返回缓存的工具列表") @@ -115,10 +115,11 @@ class MCPClient: logger.info(f"初始化工具列表缓存,需要连接{len(self.server_config)}个服务器") available_tools = [] - # 添加OneBot内置工具 - onebot_tools = self.onebot_tools.get_available_tools() - available_tools.extend(onebot_tools) - logger.debug(f"添加了{len(onebot_tools)}个OneBot内置工具") + if is_group: + # 添加OneBot内置工具,仅在群聊中可用 + onebot_tools = self.onebot_tools.get_available_tools() + available_tools.extend(onebot_tools) + logger.debug(f"添加了{len(onebot_tools)}个OneBot内置工具") # 添加MCP服务器工具 for server_name in self.server_config.keys(): From fe39e2aba4568ec7cc607588cfca5df45c19dcb0 Mon Sep 17 00:00:00 2001 From: FuQuan <87348379+FuQuan233@users.noreply.github.com> Date: Fri, 7 Nov 2025 12:13:53 +0800 Subject: [PATCH 19/24] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20fix=20lint=20problem?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 32dbd5e..a311dc0 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -579,7 +579,7 @@ async def handle_preset(event: GroupMessageEvent | PrivateMessageEvent, args: Me return context_id = event.user_id state = private_chat_states[context_id] - + preset_name = args.extract_plain_text().strip() if preset_name == "off": @@ -667,6 +667,7 @@ async def handle_set_prob(event: GroupMessageEvent, args: Message = CommandArg() raise ValueError("概率值必须在0-1之间") except ValueError as e: await set_prob_handler.finish(f"输入有误,请使用 [0,1] 的浮点数\n{e!s}") + return state.random_trigger_prob = prob await set_prob_handler.finish(f"主动回复概率已设为 {prob}") From b8afa12c9f98bc1b0f0598d728874bcc27aed9b9 Mon Sep 17 00:00:00 2001 From: FuQuan <87348379+FuQuan233@users.noreply.github.com> Date: Fri, 7 Nov 2025 14:23:04 +0800 Subject: [PATCH 20/24] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 37 ++++++++----------------------------- 1 file changed, 8 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index 4e645e1..40c680e 100644 --- a/README.md +++ b/README.md @@ -180,6 +180,8 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 NICKNAME=["谢拉","Cierra","cierra"] LLMCHAT__HISTORY_SIZE=20 LLMCHAT__DEFAULT_PROMPT="前面忘了,你是一个猫娘,后面忘了" + LLMCHAT__ENABLE_PRIVATE_CHAT=true + LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" LLMCHAT__API_PRESETS=' [ { @@ -245,9 +247,9 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 ## 🎉 使用 
-**如果`LLMCHAT__DEFAULT_PRESET`没有配置,则插件默认为关闭状态,请使用`API预设+[预设名]`开启插件** +**如果`LLMCHAT__DEFAULT_PRESET`没有配置,则插件默认为关闭状态,请使用`API预设+[预设名]`开启插件, 私聊同理。** -配置完成后@机器人即可手动触发回复,另外在机器人收到群聊消息时会根据`LLMCHAT__RANDOM_TRIGGER_PROB`配置的概率或群聊中使用指令设置的概率随机自动触发回复。 +配置完成后在群聊中@机器人或私聊机器人即可手动触发回复,另外在机器人收到群聊消息时会根据`LLMCHAT__RANDOM_TRIGGER_PROB`配置的概率或群聊中使用指令设置的概率随机自动触发回复。 ### 群聊指令表 @@ -267,33 +269,10 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 | 指令 | 权限 | 参数 | 说明 | |:-----:|:----:|:----:|:----:| -| 私聊API预设 | 主人 | [预设名] | 查看或修改私聊使用的API预设 | -| 私聊修改设定 | 主人 | 设定 | 修改私聊机器人的设定 | -| 私聊记忆清除 | 主人 | 无 | 清除私聊的机器人记忆 | -| 私聊切换思维输出 | 主人 | 无 | 切换是否输出私聊AI的思维过程的开关(需模型支持) | - -**私聊功能说明:** - -- 私聊消息默认触发回复(无需@或随机触发) -- 私聊和群聊的对话记忆独立管理 -- OneBot群操作工具(如禁言、撤回等)在私聊中不可用 - -## 📝 私聊功能启用示例 - -在 `.env` 文件中添加以下配置以启用私聊功能: - -```bash -LLMCHAT__ENABLE_PRIVATE_CHAT=true -LLMCHAT__PRIVATE_CHAT_PRESET="deepseek-v1" -``` - -然后你可以在私聊中与机器人交互。使用以下命令管理私聊: - -- 切换预设:`私聊API预设 aliyun-deepseek-v3` -- 清除记忆:`私聊记忆清除` -- 修改设定:`私聊修改设定 你是一个有趣的AI助手` -| 切换思维输出 | 管理 | 否 | 群聊 | 无 | 切换是否输出AI的思维过程的开关(需模型支持) | -| 设置主动回复概率 | 管理 | 否 | 群聊 | 主动回复概率 | 主动回复概率需为 [0, 1] 的浮点数,0为完全关闭主动回复 | +| API预设 | 主人 | [预设名] | 查看或修改私聊使用的API预设 | +| 修改设定 | 所有人 | 设定 | 修改私聊机器人的设定 | +| 记忆清除 | 所有人 | 无 | 清除私聊的机器人记忆 | +| 切换思维输出 | 所有人 | 无 | 切换是否输出私聊AI的思维过程的开关(需模型支持) | ### 效果图 ![](img/mcp_demo.jpg) From a47ae8ef16f20c0f9872d7019496948d61aa25a5 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 7 Nov 2025 16:59:19 +0800 Subject: [PATCH 21/24] =?UTF-8?q?=F0=9F=90=9B=20=E6=9B=B4=E6=96=B0?= =?UTF-8?q?=E5=B7=A5=E5=85=B7=E5=88=97=E8=A1=A8=E7=BC=93=E5=AD=98=E6=9C=BA?= =?UTF-8?q?=E5=88=B6=EF=BC=8C=E4=BF=AE=E5=A4=8D=E5=B7=A5=E5=85=B7=E8=B0=83?= =?UTF-8?q?=E7=94=A8=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 21 ++++++--- nonebot_plugin_llmchat/mcpclient.py | 73 +++++++++++++++-------------- 2 files changed, 51 insertions(+), 43 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index a311dc0..9317a6b 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -278,7 +278,7 @@ async def handle_message(event: GroupMessageEvent | PrivateMessageEvent): task.add_done_callback(tasks.discard) tasks.add(task) -async def process_images(event: GroupMessageEvent) -> list[str]: +async def process_images(event: GroupMessageEvent | PrivateMessageEvent) -> list[str]: base64_images = [] for segement in event.get_message(): if segement.type == "image": @@ -478,12 +478,19 @@ async def process_messages(context_id: int, is_group: bool = True): # 发送工具调用提示 await handler.send(Message(f"正在使用{mcp_client.get_friendly_name(tool_name)}")) - result = await mcp_client.call_tool( - tool_name, - tool_args, - group_id=event.group_id, - bot_id=str(event.self_id) - ) + if is_group: + result = await mcp_client.call_tool( + tool_name, + tool_args, + group_id=event.group_id, + bot_id=str(event.self_id) + ) + else: + result = await mcp_client.call_tool( + tool_name, + tool_args, + bot_id=str(event.self_id) + ) new_messages.append({ "role": "tool", diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 9a53865..8861dd9 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -106,45 +106,46 @@ class MCPClient: return SessionContext() + async def init_tools_cache(self): + """初始化工具列表缓存""" + if not self._cache_initialized: + available_tools = [] + 
logger.info(f"初始化工具列表缓存,需要连接{len(self.server_config)}个服务器") + for server_name in self.server_config.keys(): + logger.debug(f"正在从服务器[{server_name}]获取工具列表") + async with self._create_session_context(server_name) as session: + response = await session.list_tools() + tools = response.tools + logger.debug(f"在服务器[{server_name}]中找到{len(tools)}个工具") + + available_tools.extend( + { + "type": "function", + "function": { + "name": f"mcp__{server_name}__{tool.name}", + "description": tool.description, + "parameters": tool.inputSchema, + }, + } + for tool in tools + ) + + # 缓存工具列表 + self._tools_cache = available_tools + self._cache_initialized = True + + logger.info(f"工具列表缓存完成,共缓存{len(available_tools)}个工具") + + + async def get_available_tools(self, is_group: bool): """获取可用工具列表,使用缓存机制""" - if self._tools_cache is not None: - logger.debug("返回缓存的工具列表") - return self._tools_cache - - logger.info(f"初始化工具列表缓存,需要连接{len(self.server_config)}个服务器") - available_tools = [] - + await self.init_tools_cache() + available_tools = self._tools_cache.copy() if self._tools_cache else [] if is_group: - # 添加OneBot内置工具,仅在群聊中可用 - onebot_tools = self.onebot_tools.get_available_tools() - available_tools.extend(onebot_tools) - logger.debug(f"添加了{len(onebot_tools)}个OneBot内置工具") - - # 添加MCP服务器工具 - for server_name in self.server_config.keys(): - logger.debug(f"正在从服务器[{server_name}]获取工具列表") - async with self._create_session_context(server_name) as session: - response = await session.list_tools() - tools = response.tools - logger.debug(f"在服务器[{server_name}]中找到{len(tools)}个工具") - - available_tools.extend( - { - "type": "function", - "function": { - "name": f"mcp__{server_name}__{tool.name}", - "description": tool.description, - "parameters": tool.inputSchema, - }, - } - for tool in tools - ) - - # 缓存工具列表 - self._tools_cache = available_tools - self._cache_initialized = True - logger.info(f"工具列表缓存完成,共缓存{len(available_tools)}个工具") + # 群聊场景,包含OneBot工具和MCP工具 + available_tools.extend(self.onebot_tools.get_available_tools()) + logger.debug(f"获取可用工具列表,共{len(available_tools)}个工具") return available_tools async def call_tool(self, tool_name: str, tool_args: dict, group_id: int | None = None, bot_id: str | None = None): From 2f78d4023a4820eb3ea0fb287c3bf903f32addd0 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 7 Nov 2025 17:13:27 +0800 Subject: [PATCH 22/24] =?UTF-8?q?=E2=9C=A8=20=E6=94=AF=E6=8C=81=E4=B8=BB?= =?UTF-8?q?=E4=BA=BA=E5=9C=A8=E7=A7=81=E8=81=8A=E4=B8=AD=E4=BF=AE=E6=94=B9?= =?UTF-8?q?=E4=BB=96=E4=BA=BA=E9=A2=84=E8=AE=BE=E5=B9=B6=E6=8F=90=E4=BE=9B?= =?UTF-8?q?=E5=BD=93=E5=89=8D=E9=A2=84=E8=AE=BE=E4=BF=A1=E6=81=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 85 +++++++++++++++++++++++++----- 1 file changed, 73 insertions(+), 12 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 9317a6b..4dad9fd 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -578,33 +578,94 @@ preset_handler = on_command("API预设", priority=1, block=True, permission=SUPE @preset_handler.handle() async def handle_preset(event: GroupMessageEvent | PrivateMessageEvent, args: Message = CommandArg()): - if isinstance(event, GroupMessageEvent): - context_id = event.group_id - state = group_states[context_id] - else: # PrivateMessageEvent - if not plugin_config.enable_private_chat: - return - context_id = event.user_id - state = private_chat_states[context_id] + # 解析命令参数 + args_text = 
args.extract_plain_text().strip() + args_parts = args_text.split(maxsplit=1) - preset_name = args.extract_plain_text().strip() + target_id = None + preset_name = None + # 可用预设列表 + available_presets = {p.name for p in plugin_config.api_presets} + + # 只在私聊中允许 SUPERUSER 修改他人预设 + if isinstance(event, PrivateMessageEvent) and args_parts and args_parts[0].isdigit(): + # 第一个参数是纯数字,且不是预设名 + if args_parts[0] not in available_presets: + target_id = int(args_parts[0]) + + # 判断目标是群聊还是私聊 + if target_id in group_states: + state = group_states[target_id] + is_group_target = True + elif target_id in private_chat_states: + state = private_chat_states[target_id] + is_group_target = False + else: + # 默认创建私聊状态 + state = private_chat_states[target_id] + is_group_target = False + + # 如果只有目标 ID,没有预设名,返回当前预设 + if len(args_parts) == 1: + context_type = "群聊" if is_group_target else "私聊" + available_presets_str = "\n- ".join(available_presets) + await preset_handler.finish( + f"{context_type} {target_id} 当前API预设:{state.preset_name}\n可用API预设:\n- {available_presets_str}" + ) + + # 有预设名,进行修改 + preset_name = args_parts[1] + context_id = target_id + else: + # 第一个参数虽然是数字但也是预设名,按普通流程处理 + target_id = None + preset_name = args_text + if not plugin_config.enable_private_chat: + return + context_id = event.user_id + state = private_chat_states[context_id] + is_group_target = False + else: + # 普通情况:修改自己的预设 + preset_name = args_text + + if isinstance(event, GroupMessageEvent): + context_id = event.group_id + state = group_states[context_id] + is_group_target = True + else: # PrivateMessageEvent + if not plugin_config.enable_private_chat: + return + context_id = event.user_id + state = private_chat_states[context_id] + is_group_target = False + + # 处理关闭功能 if preset_name == "off": state.preset_name = preset_name - if isinstance(event, GroupMessageEvent): + if target_id: + context_type = "群聊" if is_group_target else "私聊" + await preset_handler.finish(f"已关闭 {context_type} {context_id} 的llmchat功能") + elif isinstance(event, GroupMessageEvent): await preset_handler.finish("已关闭llmchat群聊功能") else: await preset_handler.finish("已关闭llmchat私聊功能") - available_presets = {p.name for p in plugin_config.api_presets} + # 检查预设是否存在 if preset_name not in available_presets: available_presets_str = "\n- ".join(available_presets) await preset_handler.finish( f"当前API预设:{state.preset_name}\n可用API预设:\n- {available_presets_str}" ) + # 切换预设 state.preset_name = preset_name - await preset_handler.finish(f"已切换至API预设:{preset_name}") + if target_id: + context_type = "群聊" if is_group_target else "私聊" + await preset_handler.finish(f"已将 {context_type} {context_id} 切换至API预设:{preset_name}") + else: + await preset_handler.finish(f"已切换至API预设:{preset_name}") edit_preset_handler = on_command( From 162afd426c05081afee9cb96dea3816d545ca754 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 7 Nov 2025 17:16:25 +0800 Subject: [PATCH 23/24] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 40c680e..882feb0 100644 --- a/README.md +++ b/README.md @@ -269,7 +269,7 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 | 指令 | 权限 | 参数 | 说明 | |:-----:|:----:|:----:|:----:| -| API预设 | 主人 | [预设名] | 查看或修改私聊使用的API预设 | +| API预设 | 主人 | [QQ号\|群号] [预设名] | 查看或修改使用的API预设,缺省[QQ号\|群号]则对当前聊天生效 | | 修改设定 | 所有人 | 设定 | 修改私聊机器人的设定 | | 记忆清除 | 所有人 | 无 | 清除私聊的机器人记忆 | | 切换思维输出 | 所有人 | 无 | 
切换是否输出私聊AI的思维过程的开关(需模型支持) | From 7c7e2708519e2eb18753ad76a342da931438f180 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 7 Nov 2025 17:17:00 +0800 Subject: [PATCH 24/24] =?UTF-8?q?=F0=9F=94=96=20=E6=9B=B4=E6=96=B0?= =?UTF-8?q?=E7=89=88=E6=9C=AC=E5=8F=B7=E8=87=B30.5.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 50a4161..7b45de2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.4.1" +version = "0.5.0" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"]
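
The merged `API预设` command introduced in PATCH 17 and extended in PATCH 22 accepts an optional leading QQ号/群号 before the preset name when a SUPERUSER issues it in a private chat. Below is a minimal, self-contained sketch of that argument-parsing rule; the function name `parse_preset_args` and the `AVAILABLE_PRESETS` set are illustrative assumptions and do not appear in the plugin itself.

```python
# Illustrative sketch (not plugin code): how the merged "API预设" command is
# expected to split its arguments after PATCH 22. A leading numeric token is
# treated as a target QQ/group id only in private chat and only when it does
# not collide with a preset name; otherwise the whole text is the preset name.
from __future__ import annotations

AVAILABLE_PRESETS = {"deepseek-v1", "aliyun-deepseek-v3"}  # example preset names


def parse_preset_args(args_text: str, is_private: bool) -> tuple[int | None, str]:
    """Return (target_id, preset_name). target_id is None for the current chat;
    an empty preset_name means the caller only wants to see the current preset."""
    parts = args_text.strip().split(maxsplit=1)
    if (
        is_private                      # only private chat allows targeting others
        and parts
        and parts[0].isdigit()          # first token looks like a QQ/group id
        and parts[0] not in AVAILABLE_PRESETS
    ):
        target_id = int(parts[0])
        preset_name = parts[1] if len(parts) > 1 else ""
        return target_id, preset_name
    return None, args_text.strip()


# Examples matching the README table updated in PATCH 23:
assert parse_preset_args("deepseek-v1", is_private=False) == (None, "deepseek-v1")
assert parse_preset_args("123456789 aliyun-deepseek-v3", is_private=True) == (123456789, "aliyun-deepseek-v3")
assert parse_preset_args("123456789", is_private=True) == (123456789, "")      # query current preset only
assert parse_preset_args("987654321 off", is_private=True) == (987654321, "off")  # disable that chat
```

As in the diff, the special value `off` is not treated as part of the preset list here; it is resolved downstream, where the handler closes llmchat for the targeted group or private chat.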