From e3973baa37fe2af00abe2d76edddc2dffe630762 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sun, 27 Apr 2025 11:56:38 +0800 Subject: [PATCH 01/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8Dassistan?= =?UTF-8?q?t=E6=B6=88=E6=81=AF=E6=B2=A1=E6=9C=89=E6=AD=A3=E7=A1=AE?= =?UTF-8?q?=E6=B7=BB=E5=8A=A0=E5=88=B0=E5=8E=86=E5=8F=B2=E8=AE=B0=E5=BD=95?= =?UTF-8?q?=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 3435f28..879953a 100644 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -268,10 +268,6 @@ async def process_messages(group_id: int): logger.debug( f"发送API请求 模型:{preset.model_name} 历史消息数:{len(messages)}" ) - mcp_client = MCPClient(plugin_config.mcp_servers) - await mcp_client.connect_to_servers() - - available_tools = await mcp_client.get_available_tools() client_config = { "model": preset.model_name, @@ -280,7 +276,10 @@ async def process_messages(group_id: int): "timeout": 60, } + mcp_client = MCPClient(plugin_config.mcp_servers) if preset.support_mcp: + await mcp_client.connect_to_servers() + available_tools = await mcp_client.get_available_tools() client_config["tools"] = available_tools response = await client.chat.completions.create( @@ -291,10 +290,7 @@ async def process_messages(group_id: int): if response.usage is not None: logger.debug(f"收到API响应 使用token数:{response.usage.total_tokens}") - final_message = [] message = response.choices[0].message - if message.content: - final_message.append(message.content) # 处理响应并处理工具调用 while preset.support_mcp and message.tool_calls: @@ -302,6 +298,11 @@ async def process_messages(group_id: int): "role": "assistant", "tool_calls": [tool_call.model_dump() for tool_call in message.tool_calls] }) + + # 发送LLM调用工具时的回复,一般没有 + if message.content: + await handler.send(Message(message.content)) + # 处理每个工具调用 for tool_call in message.tool_calls: tool_name = tool_call.function.name @@ -326,8 +327,6 @@ async def process_messages(group_id: int): ) message = response.choices[0].message - if message.content: - final_message.append(message.content) await mcp_client.cleanup() @@ -339,6 +338,11 @@ async def process_messages(group_id: int): or matched_reasoning_content ) + new_messages.append({ + "role": "assistant", + "content": reply, + }) + # 请求成功后再保存历史记录,保证user和assistant穿插,防止R1模型报错 for message in new_messages: state.history.append(message) From 8013df564a97766cb19629c230eabab2a23f6652 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sun, 27 Apr 2025 11:57:34 +0800 Subject: [PATCH 02/23] =?UTF-8?q?=F0=9F=94=96=20bump=20llmchat=20version?= =?UTF-8?q?=200.2.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e7b4d58..4875a8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.2.0" +version = "0.2.1" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From c9c22a86302b4764d770eef88a317dcbb9aed626 Mon Sep 17 00:00:00 2001 From: FuQuan <87348379+FuQuan233@users.noreply.github.com> Date: Sun, 27 Apr 2025 18:08:50 +0800 Subject: [PATCH 03/23] 
=?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 8bedf4a..30cbbcf 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ _✨ 支持多API预设、MCP协议、联网搜索的AI群聊插件 ✨_ pypi -python +python @@ -195,4 +195,4 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 ### 效果图 ![](img/mcp_demo.jpg) -![](img/demo.png) \ No newline at end of file +![](img/demo.png) From db9794a18aa4e42851c5a4aff7d5db03fa19e1d2 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 28 Apr 2025 20:19:47 +0800 Subject: [PATCH 04/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8D?= =?UTF-8?q?=E5=8F=AF=E8=83=BD=E5=87=BA=E7=8E=B0=E9=A6=96=E6=9D=A1=E6=B6=88?= =?UTF-8?q?=E6=81=AF=E4=B8=8D=E4=B8=BAuser=E6=B6=88=E6=81=AF=E5=AF=BC?= =?UTF-8?q?=E8=87=B4=E6=8A=A5=E9=94=99=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 879953a..15f9e7a 100644 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -252,6 +252,9 @@ async def process_messages(group_id: int): {"role": "system", "content": systemPrompt} ] + while len(state.history) > 0 and state.history[0]["role"] != "user": + state.history.popleft() + messages += list(state.history)[-plugin_config.history_size * 2 :] # 没有未处理的消息说明已经被处理了,跳过 From f2d1521158eb2b358cad523f6194911fd89a2374 Mon Sep 17 00:00:00 2001 From: duolanda Date: Sat, 10 May 2025 22:58:44 +0800 Subject: [PATCH 05/23] =?UTF-8?q?=E2=9C=A8=20support=20vision=20models?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 36 ++++++++++++++++++++++++++++++ nonebot_plugin_llmchat/config.py | 1 + 2 files changed, 37 insertions(+) mode change 100644 => 100755 nonebot_plugin_llmchat/__init__.py mode change 100644 => 100755 nonebot_plugin_llmchat/config.py diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py old mode 100644 new mode 100755 index 15f9e7a..1d33e19 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -6,6 +6,8 @@ import os import random import re import time +import base64 +import ssl from typing import TYPE_CHECKING import aiofiles @@ -197,6 +199,32 @@ async def handle_message(event: GroupMessageEvent): task.add_done_callback(tasks.discard) tasks.add(task) +async def process_images(event: GroupMessageEvent) -> list[str]: + base64_images = [] + for segement in event.get_message(): + if segement.type == "image": + image_url = segement.data.get("url") + if image_url: + try: + # 处理高版本 httpx 的 [SSL: SSLV3_ALERT_HANDSHAKE_FAILURE] 报错 + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + ssl_context.set_ciphers('DEFAULT@SECLEVEL=2') + + # 下载图片并将图片转换为base64 + async with httpx.AsyncClient(verify=ssl_context) as client: + response = await client.get(image_url, timeout=10.0) + if response.status_code != 200: + logger.error(f"下载图片失败: {image_url}, 状态码: {response.status_code}") + continue + image_data = response.content + base64_data = base64.b64encode(image_data).decode('utf-8') + base64_images.append(base64_data) + except Exception as e: + 
logger.error(f"处理图片时出错: {e}") + logger.debug(f"共处理 {len(base64_images)} 张图片") + return base64_images async def process_messages(group_id: int): state = group_states[group_id] @@ -260,9 +288,17 @@ async def process_messages(group_id: int): # 没有未处理的消息说明已经被处理了,跳过 if state.past_events.__len__() < 1: break + + # 将消息中的图片转成 base64 + base64_images = [] + if preset.support_image: + base64_images = await process_images(event) # 将机器人错过的消息推送给LLM content = ",".join([format_message(ev) for ev in state.past_events]) + content = [{"type": "text", "text": content}] + for base64_image in base64_images: + content.append({"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}}) new_messages: list[ChatCompletionMessageParam] = [ {"role": "user", "content": content} diff --git a/nonebot_plugin_llmchat/config.py b/nonebot_plugin_llmchat/config.py old mode 100644 new mode 100755 index c802a8d..26f9b67 --- a/nonebot_plugin_llmchat/config.py +++ b/nonebot_plugin_llmchat/config.py @@ -12,6 +12,7 @@ class PresetConfig(BaseModel): temperature: float = Field(0.7, description="生成温度(0-2]") proxy: str = Field("", description="HTTP代理服务器") support_mcp: bool = Field(False, description="是否支持MCP") + support_image: bool = Field(False, description="是否支持图片输入") class MCPServerConfig(BaseModel): """MCP服务器配置""" From 5e048c947289873424a0d49919bc5ed611fc9283 Mon Sep 17 00:00:00 2001 From: duolanda Date: Sun, 11 May 2025 00:41:05 +0800 Subject: [PATCH 06/23] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20fix=20lint=20problem?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 1d33e19..0b14e1c 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -1,13 +1,13 @@ import asyncio +import base64 from collections import defaultdict, deque from datetime import datetime import json import os import random import re -import time -import base64 import ssl +import time from typing import TYPE_CHECKING import aiofiles @@ -39,7 +39,11 @@ require("nonebot_plugin_apscheduler") from nonebot_plugin_apscheduler import scheduler if TYPE_CHECKING: - from openai.types.chat import ChatCompletionMessageParam + from openai.types.chat import ( + ChatCompletionContentPartImageParam, + ChatCompletionContentPartTextParam, + ChatCompletionMessageParam, + ) __plugin_meta__ = PluginMetadata( name="llmchat", @@ -210,7 +214,7 @@ async def process_images(event: GroupMessageEvent) -> list[str]: ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE - ssl_context.set_ciphers('DEFAULT@SECLEVEL=2') + ssl_context.set_ciphers("DEFAULT@SECLEVEL=2") # 下载图片并将图片转换为base64 async with httpx.AsyncClient(verify=ssl_context) as client: @@ -219,7 +223,7 @@ async def process_images(event: GroupMessageEvent) -> list[str]: logger.error(f"下载图片失败: {image_url}, 状态码: {response.status_code}") continue image_data = response.content - base64_data = base64.b64encode(image_data).decode('utf-8') + base64_data = base64.b64encode(image_data).decode("utf-8") base64_images.append(base64_data) except Exception as e: logger.error(f"处理图片时出错: {e}") @@ -288,15 +292,17 @@ async def process_messages(group_id: int): # 没有未处理的消息说明已经被处理了,跳过 if state.past_events.__len__() < 1: break - + # 将消息中的图片转成 base64 base64_images = [] if 
preset.support_image: base64_images = await process_images(event) # 将机器人错过的消息推送给LLM - content = ",".join([format_message(ev) for ev in state.past_events]) - content = [{"type": "text", "text": content}] + text_content = ",".join([format_message(ev) for ev in state.past_events]) + content: list[ChatCompletionContentPartTextParam | ChatCompletionContentPartImageParam] = [ + {"type": "text", "text": text_content} + ] for base64_image in base64_images: content.append({"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}}) From ed1b9792e72e66a91b04a72f3d27c82af6b6c97e Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sun, 11 May 2025 15:05:26 +0800 Subject: [PATCH 07/23] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 30cbbcf..89f71d9 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ # nonebot-plugin-llmchat -_✨ 支持多API预设、MCP协议、联网搜索的AI群聊插件 ✨_ +_✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插件 ✨_ @@ -119,6 +119,8 @@ _✨ 支持多API预设、MCP协议、联网搜索的AI群聊插件 ✨_ | max_tokens | 否 | 2048 | 最大响应token数 | | temperature | 否 | 0.7 | 生成温度 | | proxy | 否 | 无 | 请求API时使用的HTTP代理 | +| support_mcp | 否 | False | 是否支持MCP协议 | +| support_image | 否 | False | 是否支持图片输入 | LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的格式基本兼容 Claude.app 的配置格式,具体支持如下 @@ -151,10 +153,18 @@ LLMCHAT__MCP_SERVERS同样为一个dict,key为服务器名称,value配置的 "proxy": "http://10.0.0.183:7890" }, { - "name": "deepseek-r1", + "name": "deepseek-v1", "api_key": "sk-your-api-key", - "model_name": "deepseek-reasoner", - "api_base": "https://api.deepseek.com" + "model_name": "deepseek-chat", + "api_base": "https://api.deepseek.com", + "support_mcp": true + }, + { + "name": "some-vison-model", + "api_key": "sk-your-api-key", + "model_name": "some-vison-model", + "api_base": "https://some-vison-model.com/api", + "support_image": true } ] LLMCHAT__MCP_SERVERS=' From 6f69cc3cff0888e9e7fbb2ec7115a782370c15a9 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sun, 11 May 2025 15:42:13 +0800 Subject: [PATCH 08/23] =?UTF-8?q?=E2=9C=A8=20=E6=94=AF=E6=8C=81=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E9=BB=91=E5=90=8D=E5=8D=95=20#20?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 1 + nonebot_plugin_llmchat/__init__.py | 4 ++++ nonebot_plugin_llmchat/config.py | 1 + 3 files changed, 6 insertions(+) diff --git a/README.md b/README.md index 89f71d9..2f41e31 100644 --- a/README.md +++ b/README.md @@ -107,6 +107,7 @@ _✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插 | LLMCHAT__DEFAULT_PRESET | 否 | off | 默认使用的预设名称,配置为off则为关闭 | | LLMCHAT__RANDOM_TRIGGER_PROB | 否 | 0.05 | 默认随机触发概率 [0, 1] | | LLMCHAT__DEFAULT_PROMPT | 否 | 你的回答应该尽量简洁、幽默、可以使用一些语气词、颜文字。你应该拒绝回答任何政治相关的问题。 | 默认提示词 | +| LLMCHAT__BLACKLIST_USER_IDS | 否 | [] | 黑名单用户ID列表,机器人将不会处理黑名单用户的消息 | | LLMCHAT__MCP_SERVERS | 否 | {} | MCP服务器配置,具体见下表 | 其中LLMCHAT__API_PRESETS为一个列表,每项配置有以下的配置项 diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 0b14e1c..b2021c2 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -166,6 +166,10 @@ async def is_triggered(event: GroupMessageEvent) -> bool: if state.preset_name == "off": return False + # 黑名单用户 + if event.user_id in plugin_config.blacklist_user_ids: + return False + state.past_events.append(event) # 原有@触发条件 diff --git a/nonebot_plugin_llmchat/config.py 
b/nonebot_plugin_llmchat/config.py index 26f9b67..fa873d5 100755 --- a/nonebot_plugin_llmchat/config.py +++ b/nonebot_plugin_llmchat/config.py @@ -43,6 +43,7 @@ class ScopedConfig(BaseModel): description="默认提示词", ) mcp_servers: dict[str, MCPServerConfig] = Field({}, description="MCP服务器配置") + blacklist_user_ids: set[int] = Field(set(), description="黑名单用户ID列表") class Config(BaseModel): From ee2a0451169bb80299566e724187adf1d1fd1776 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Sun, 11 May 2025 15:45:57 +0800 Subject: [PATCH 09/23] =?UTF-8?q?=F0=9F=94=96=20bump=20llmchat=20version?= =?UTF-8?q?=200.2.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4875a8f..b396caa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.2.1" +version = "0.2.2" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From 84d385193645e16846dcdb169a4471f062be0e27 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 12 May 2025 15:26:39 +0800 Subject: [PATCH 10/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8D?= =?UTF-8?q?=E6=9F=90=E4=BA=9B=E5=8D=8F=E8=AE=AE=E7=AB=AF=E6=89=BE=E4=B8=8D?= =?UTF-8?q?=E5=88=B0=E5=9B=BE=E7=89=87url=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index b2021c2..8cb350a 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -211,7 +211,7 @@ async def process_images(event: GroupMessageEvent) -> list[str]: base64_images = [] for segement in event.get_message(): if segement.type == "image": - image_url = segement.data.get("url") + image_url = segement.data.get("url") or segement.data.get("file") if image_url: try: # 处理高版本 httpx 的 [SSL: SSLV3_ALERT_HANDSHAKE_FAILURE] 报错 From 7edd7c913e53362775612769dedf66fe69038e63 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Tue, 13 May 2025 11:23:52 +0800 Subject: [PATCH 11/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8DMCP?= =?UTF-8?q?=E8=B0=83=E7=94=A8=E8=BF=87=E7=A8=8B=E4=B8=AD=E5=9B=9E=E5=A4=8D?= =?UTF-8?q?=E4=B8=8D=E5=88=86=E6=9D=A1=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 31 +++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 8cb350a..dbaad07 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -234,6 +234,20 @@ async def process_images(event: GroupMessageEvent) -> list[str]: logger.debug(f"共处理 {len(base64_images)} 张图片") return base64_images +async def send_split_messages(message_handler, content: str): + """ + 将消息按分隔符分段并发送 + """ + logger.info(f"准备发送分段消息,分段数:{len(content.split(''))}") + for segment in content.split(""): + # 跳过空消息 + if not segment.strip(): + continue + segment = segment.strip() # 删除前后多余的换行和空格 + await asyncio.sleep(2) # 避免发送过快 + logger.debug(f"发送消息分段 内容:{segment[:50]}...") # 只记录前50个字符避免日志过大 + await message_handler.send(Message(segment)) + async def 
process_messages(group_id: int): state = group_states[group_id] preset = get_preset(group_id) @@ -350,7 +364,7 @@ async def process_messages(group_id: int): # 发送LLM调用工具时的回复,一般没有 if message.content: - await handler.send(Message(message.content)) + await send_split_messages(handler, message.content) # 处理每个工具调用 for tool_call in message.tool_calls: @@ -410,20 +424,7 @@ async def process_messages(group_id: int): logger.error(f"合并转发消息发送失败:\n{e!s}\n") assert reply is not None - logger.info( - f"准备发送回复消息 群号:{group_id} 消息分段数:{len(reply.split(''))}" - ) - for r in reply.split(""): - # 似乎会有空消息的情况导致string index out of range异常 - if len(r) == 0 or r.isspace(): - continue - # 删除前后多余的换行和空格 - r = r.strip() - await asyncio.sleep(2) - logger.debug( - f"发送消息分段 内容:{r[:50]}..." - ) # 只记录前50个字符避免日志过大 - await handler.send(Message(r)) + await send_split_messages(handler, reply) except Exception as e: logger.opt(exception=e).error(f"API请求失败 群号:{group_id}") From 3d85ea90ef3d84cc885477b68726eb32c65fa1e2 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Tue, 13 May 2025 13:41:28 +0800 Subject: [PATCH 12/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8D?= =?UTF-8?q?=E5=A4=9A=E6=9D=A1=E6=B6=88=E6=81=AF=E4=B8=AD=E5=8F=AA=E5=A4=84?= =?UTF-8?q?=E7=90=86=E6=9C=80=E5=90=8E=E4=B8=80=E6=9D=A1=E6=B6=88=E6=81=AF?= =?UTF-8?q?=E7=9A=84=E5=9B=BE=E7=89=87=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index dbaad07..600ae9e 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -40,8 +40,7 @@ from nonebot_plugin_apscheduler import scheduler if TYPE_CHECKING: from openai.types.chat import ( - ChatCompletionContentPartImageParam, - ChatCompletionContentPartTextParam, + ChatCompletionContentPartParam, ChatCompletionMessageParam, ) @@ -311,18 +310,17 @@ async def process_messages(group_id: int): if state.past_events.__len__() < 1: break - # 将消息中的图片转成 base64 - base64_images = [] - if preset.support_image: - base64_images = await process_images(event) + content: list[ChatCompletionContentPartParam] = [] # 将机器人错过的消息推送给LLM - text_content = ",".join([format_message(ev) for ev in state.past_events]) - content: list[ChatCompletionContentPartTextParam | ChatCompletionContentPartImageParam] = [ - {"type": "text", "text": text_content} - ] - for base64_image in base64_images: - content.append({"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}}) + for ev in state.past_events: + content.append({"type": "text", "text": format_message(ev)}) + + # 将消息中的图片转成 base64 + if preset.support_image: + base64_images = await process_images(ev) + for base64_image in base64_images: + content.append({"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}}) new_messages: list[ChatCompletionMessageParam] = [ {"role": "user", "content": content} From 6c27cf56fabeee636f3790ea9de7598dba638320 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Tue, 13 May 2025 13:43:06 +0800 Subject: [PATCH 13/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8D?= =?UTF-8?q?=E5=91=BD=E4=BB=A4=E6=9C=AC=E8=BA=AB=E4=BC=9A=E8=A7=A6=E5=8F=91?= =?UTF-8?q?=E5=9B=9E=E5=A4=8D=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 8 ++++---- 
1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 600ae9e..2a8bf85 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -185,7 +185,7 @@ async def is_triggered(event: GroupMessageEvent) -> bool: # 消息处理器 handler = on_message( rule=Rule(is_triggered), - priority=10, + priority=99, block=False, ) @@ -459,7 +459,7 @@ async def handle_preset(event: GroupMessageEvent, args: Message = CommandArg()): edit_preset_handler = on_command( "修改设定", - priority=99, + priority=1, block=True, permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), ) @@ -476,7 +476,7 @@ async def handle_edit_preset(event: GroupMessageEvent, args: Message = CommandAr reset_handler = on_command( "记忆清除", - priority=99, + priority=1, block=True, permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), ) @@ -493,7 +493,7 @@ async def handle_reset(event: GroupMessageEvent, args: Message = CommandArg()): set_prob_handler = on_command( "设置主动回复概率", - priority=99, + priority=1, block=True, permission=(SUPERUSER | GROUP_ADMIN | GROUP_OWNER), ) From cf2d549f021d9b0faf522ae0c08530270305a34e Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Tue, 13 May 2025 14:02:03 +0800 Subject: [PATCH 14/23] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0meta?= =?UTF-8?q?=E4=BF=A1=E6=81=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 2a8bf85..2fb78fb 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -46,7 +46,7 @@ if TYPE_CHECKING: __plugin_meta__ = PluginMetadata( name="llmchat", - description="支持多API预设、MCP协议、联网搜索的AI群聊插件", + description="支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插件", usage="""@机器人 + 消息 开启对话""", type="application", homepage="https://github.com/FuQuan233/nonebot-plugin-llmchat", From 52ada66616b13f4b8b391f1b5f88154fedf6a588 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Tue, 13 May 2025 14:02:23 +0800 Subject: [PATCH 15/23] =?UTF-8?q?=F0=9F=94=96=20bump=20llmchat=20version?= =?UTF-8?q?=200.2.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b396caa..02c7bcf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.2.2" +version = "0.2.3" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From 19ff0026c07bbd68bac2464484e9169925074460 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Fri, 16 May 2025 21:43:08 +0800 Subject: [PATCH 16/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8Ddeque=20?= =?UTF-8?q?mutated=20during=20iteration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 2fb78fb..c9dda84 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -313,8 +313,10 @@ async def process_messages(group_id: int): content: list[ChatCompletionContentPartParam] = [] # 将机器人错过的消息推送给LLM - for ev in 
state.past_events: - content.append({"type": "text", "text": format_message(ev)}) + past_events_snapshot = list(state.past_events) + for ev in past_events_snapshot: + text_content = ",".join([format_message(ev) for ev in past_events_snapshot]) + content.append({"type": "text", "text": text_content}) # 将消息中的图片转成 base64 if preset.support_image: From 89baec6abcc842af4f153f1d2a824db8c921a5aa Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 19 May 2025 14:17:25 +0800 Subject: [PATCH 17/23] =?UTF-8?q?=F0=9F=93=98=20=E6=9B=B4=E6=96=B0=20READM?= =?UTF-8?q?E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 2f41e31..ceef771 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ _✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插 pypi python +Ask DeepWiki From 5014d3014bb38d36df7851d091a74111b9ee837c Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Wed, 20 Aug 2025 11:40:54 +0800 Subject: [PATCH 18/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8Dmcp?= =?UTF-8?q?=E6=9C=8D=E5=8A=A1=E5=99=A8=E5=8D=A1=E4=BD=8F=E5=AF=BC=E8=87=B4?= =?UTF-8?q?=E7=9A=84=E5=8D=A1=E6=AD=BB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 2 +- nonebot_plugin_llmchat/mcpclient.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index c9dda84..d0d5857 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -380,7 +380,7 @@ async def process_messages(group_id: int): new_messages.append({ "role": "tool", "tool_call_id": tool_call.id, - "content": str(result.content) + "content": str(result) }) # 将工具调用的结果交给 LLM diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 7031d34..9ed9fe0 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -1,4 +1,5 @@ from contextlib import AsyncExitStack +import asyncio from mcp import ClientSession, StdioServerParameters from mcp.client.sse import sse_client @@ -64,9 +65,15 @@ class MCPClient: server_name, real_tool_name = tool_name.split("___") logger.info(f"正在服务器[{server_name}]上调用工具[{real_tool_name}]") session = self.sessions[server_name] - response = await session.call_tool(real_tool_name, tool_args) + try: + response = await asyncio.wait_for( + session.call_tool(real_tool_name, tool_args), timeout=10 + ) + except asyncio.TimeoutError: + logger.error(f"调用工具[{real_tool_name}]超时") + return f"调用工具[{real_tool_name}]超时" logger.debug(f"工具[{real_tool_name}]调用完成,响应: {response}") - return response + return response.content def get_friendly_name(self, tool_name: str): server_name, real_tool_name = tool_name.split("___") From ea635fd147a381bb6cc814b5e8708b5e5837122d Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Wed, 20 Aug 2025 12:38:39 +0800 Subject: [PATCH 19/23] =?UTF-8?q?=F0=9F=90=9B=20=E4=BF=AE=E5=A4=8D?= =?UTF-8?q?=E9=87=8D=E5=A4=8D=E5=8F=91=E9=80=81=E6=B6=88=E6=81=AF=E7=BB=99?= =?UTF-8?q?llm=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/__init__.py | 2 +- nonebot_plugin_llmchat/mcpclient.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index d0d5857..0d0feae 100755 --- 
a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -315,7 +315,7 @@ async def process_messages(group_id: int): # 将机器人错过的消息推送给LLM past_events_snapshot = list(state.past_events) for ev in past_events_snapshot: - text_content = ",".join([format_message(ev) for ev in past_events_snapshot]) + text_content = format_message(ev) content.append({"type": "text", "text": text_content}) # 将消息中的图片转成 base64 diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index 9ed9fe0..a6c6ff3 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -1,5 +1,5 @@ -from contextlib import AsyncExitStack import asyncio +from contextlib import AsyncExitStack from mcp import ClientSession, StdioServerParameters from mcp.client.sse import sse_client @@ -66,9 +66,7 @@ class MCPClient: logger.info(f"正在服务器[{server_name}]上调用工具[{real_tool_name}]") session = self.sessions[server_name] try: - response = await asyncio.wait_for( - session.call_tool(real_tool_name, tool_args), timeout=10 - ) + response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=10) except asyncio.TimeoutError: logger.error(f"调用工具[{real_tool_name}]超时") return f"调用工具[{real_tool_name}]超时" From 53d57beba3f4e30df393cd052acfebb789376175 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Wed, 20 Aug 2025 12:48:13 +0800 Subject: [PATCH 20/23] =?UTF-8?q?=F0=9F=94=96=20bump=20llmchat=20version?= =?UTF-8?q?=200.2.4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 02c7bcf..61e73fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.2.3" +version = "0.2.4" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"] From 9f81a38d5b27d5f8693d918a916dc400b87f4a2a Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 1 Sep 2025 10:45:18 +0800 Subject: [PATCH 21/23] =?UTF-8?q?=F0=9F=90=9B=20=E5=B0=86mcp=E8=B6=85?= =?UTF-8?q?=E6=97=B6=E5=BB=B6=E9=95=BF=E5=88=B030=E7=A7=92=EF=BC=8C?= =?UTF-8?q?=E9=81=BF=E5=85=8D=E6=89=A7=E8=A1=8C=E5=A4=B1=E8=B4=A5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nonebot_plugin_llmchat/mcpclient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nonebot_plugin_llmchat/mcpclient.py b/nonebot_plugin_llmchat/mcpclient.py index a6c6ff3..55e1b44 100644 --- a/nonebot_plugin_llmchat/mcpclient.py +++ b/nonebot_plugin_llmchat/mcpclient.py @@ -66,7 +66,7 @@ class MCPClient: logger.info(f"正在服务器[{server_name}]上调用工具[{real_tool_name}]") session = self.sessions[server_name] try: - response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=10) + response = await asyncio.wait_for(session.call_tool(real_tool_name, tool_args), timeout=30) except asyncio.TimeoutError: logger.error(f"调用工具[{real_tool_name}]超时") return f"调用工具[{real_tool_name}]超时" From 1600cba1720f186c7b2098dd1b6014e5425bc560 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 1 Sep 2025 10:51:30 +0800 Subject: [PATCH 22/23] =?UTF-8?q?=E2=9C=A8=20=E6=94=AF=E6=8C=81=E5=BF=BD?= =?UTF-8?q?=E7=95=A5=E7=89=B9=E5=AE=9A=E5=89=8D=E7=BC=80=E7=9A=84=E6=B6=88?= =?UTF-8?q?=E6=81=AF=20#21?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
README.md | 1 + nonebot_plugin_llmchat/__init__.py | 6 ++++++ nonebot_plugin_llmchat/config.py | 4 ++++ 3 files changed, 11 insertions(+) diff --git a/README.md b/README.md index ceef771..356effc 100644 --- a/README.md +++ b/README.md @@ -109,6 +109,7 @@ _✨ 支持多API预设、MCP协议、联网搜索、视觉模型的AI群聊插 | LLMCHAT__RANDOM_TRIGGER_PROB | 否 | 0.05 | 默认随机触发概率 [0, 1] | | LLMCHAT__DEFAULT_PROMPT | 否 | 你的回答应该尽量简洁、幽默、可以使用一些语气词、颜文字。你应该拒绝回答任何政治相关的问题。 | 默认提示词 | | LLMCHAT__BLACKLIST_USER_IDS | 否 | [] | 黑名单用户ID列表,机器人将不会处理黑名单用户的消息 | +| LLMCHAT__IGNORE_PREFIXES | 否 | [] | 需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理 | | LLMCHAT__MCP_SERVERS | 否 | {} | MCP服务器配置,具体见下表 | 其中LLMCHAT__API_PRESETS为一个列表,每项配置有以下的配置项 diff --git a/nonebot_plugin_llmchat/__init__.py b/nonebot_plugin_llmchat/__init__.py index 0d0feae..d3c6605 100755 --- a/nonebot_plugin_llmchat/__init__.py +++ b/nonebot_plugin_llmchat/__init__.py @@ -169,6 +169,12 @@ async def is_triggered(event: GroupMessageEvent) -> bool: if event.user_id in plugin_config.blacklist_user_ids: return False + # 忽略特定前缀的消息 + msg_text = event.get_plaintext().strip() + for prefix in plugin_config.ignore_prefixes: + if msg_text.startswith(prefix): + return False + state.past_events.append(event) # 原有@触发条件 diff --git a/nonebot_plugin_llmchat/config.py b/nonebot_plugin_llmchat/config.py index fa873d5..d658875 100755 --- a/nonebot_plugin_llmchat/config.py +++ b/nonebot_plugin_llmchat/config.py @@ -44,6 +44,10 @@ class ScopedConfig(BaseModel): ) mcp_servers: dict[str, MCPServerConfig] = Field({}, description="MCP服务器配置") blacklist_user_ids: set[int] = Field(set(), description="黑名单用户ID列表") + ignore_prefixes: list[str] = Field( + default_factory=list, + description="需要忽略的消息前缀列表,匹配到这些前缀的消息不会处理" + ) class Config(BaseModel): From d640f16abee05ee82eac8046273c0653d77e0b49 Mon Sep 17 00:00:00 2001 From: FuQuan233 Date: Mon, 1 Sep 2025 10:56:31 +0800 Subject: [PATCH 23/23] =?UTF-8?q?=F0=9F=94=96=20bump=20llmchat=20version?= =?UTF-8?q?=200.2.5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 61e73fa..7c17df2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nonebot-plugin-llmchat" -version = "0.2.4" +version = "0.2.5" description = "Nonebot AI group chat plugin supporting multiple API preset configurations" license = "GPL" authors = ["FuQuan i@fuquan.moe"]
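
For reference, the options introduced across this series (per-preset support_image / support_mcp, plus LLMCHAT__BLACKLIST_USER_IDS and LLMCHAT__IGNORE_PREFIXES) can be combined in a single .env. This is only an illustrative sketch: the option and field names come from the patched README and config.py, while every value, preset name, key and URL below is a placeholder to adapt to your own deployment.

    LLMCHAT__BLACKLIST_USER_IDS='[10000, 10001]'
    LLMCHAT__IGNORE_PREFIXES='["/", "#"]'
    LLMCHAT__API_PRESETS='[
        {
            "name": "example-vision-preset",
            "api_key": "sk-your-api-key",
            "model_name": "example-vision-model",
            "api_base": "https://example.com/v1",
            "support_image": true
        },
        {
            "name": "example-mcp-preset",
            "api_key": "sk-your-api-key",
            "model_name": "example-tool-model",
            "api_base": "https://example.com/v1",
            "support_mcp": true
        }
    ]'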