Mirror of https://github.com/FuQuan233/nonebot-plugin-llmchat.git, synced 2025-09-06 19:40:44 +00:00
Commit 0ddf8e5626: 2 changed files with 45 additions and 2 deletions
nonebot_plugin_llmchat/__init__.py (46 changes, Normal file → Executable file)
@@ -1,10 +1,12 @@
 import asyncio
+import base64
 from collections import defaultdict, deque
 from datetime import datetime
 import json
 import os
 import random
 import re
+import ssl
 import time
 from typing import TYPE_CHECKING
 
@@ -37,7 +39,11 @@ require("nonebot_plugin_apscheduler")
 from nonebot_plugin_apscheduler import scheduler
 
 if TYPE_CHECKING:
-    from openai.types.chat import ChatCompletionMessageParam
+    from openai.types.chat import (
+        ChatCompletionContentPartImageParam,
+        ChatCompletionContentPartTextParam,
+        ChatCompletionMessageParam,
+    )
 
 __plugin_meta__ = PluginMetadata(
     name="llmchat",
@@ -197,6 +203,32 @@ async def handle_message(event: GroupMessageEvent):
     task.add_done_callback(tasks.discard)
     tasks.add(task)
 
+async def process_images(event: GroupMessageEvent) -> list[str]:
+    base64_images = []
+    for segement in event.get_message():
+        if segement.type == "image":
+            image_url = segement.data.get("url")
+            if image_url:
+                try:
+                    # Work around [SSL: SSLV3_ALERT_HANDSHAKE_FAILURE] errors seen with newer httpx versions
+                    ssl_context = ssl.create_default_context()
+                    ssl_context.check_hostname = False
+                    ssl_context.verify_mode = ssl.CERT_NONE
+                    ssl_context.set_ciphers("DEFAULT@SECLEVEL=2")
+
+                    # Download the image and convert it to base64
+                    async with httpx.AsyncClient(verify=ssl_context) as client:
+                        response = await client.get(image_url, timeout=10.0)
+                        if response.status_code != 200:
+                            logger.error(f"Failed to download image: {image_url}, status code: {response.status_code}")
+                            continue
+                        image_data = response.content
+                        base64_data = base64.b64encode(image_data).decode("utf-8")
+                        base64_images.append(base64_data)
+                except Exception as e:
+                    logger.error(f"Error while processing an image: {e}")
+    logger.debug(f"Processed {len(base64_images)} image(s) in total")
+    return base64_images
 
 async def process_messages(group_id: int):
     state = group_states[group_id]
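As a side note on the hunk above: process_images relaxes TLS verification because the image URLs it downloads can trigger [SSL: SSLV3_ALERT_HANDSHAKE_FAILURE] under newer httpx/OpenSSL defaults, as the comment in the commit says. Below is a minimal standalone sketch of that pattern using only the standard library and httpx; the helper name fetch_image is hypothetical and not part of the commit.

import ssl

import httpx

# Hypothetical helper mirroring the TLS workaround used in process_images above.
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False             # do not verify the hostname
ssl_context.verify_mode = ssl.CERT_NONE        # do not verify the certificate chain
ssl_context.set_ciphers("DEFAULT@SECLEVEL=2")  # pin the OpenSSL cipher security level explicitly


async def fetch_image(url: str) -> bytes:
    # httpx accepts a custom SSLContext through the `verify` argument
    async with httpx.AsyncClient(verify=ssl_context) as client:
        resp = await client.get(url, timeout=10.0)
        resp.raise_for_status()
        return resp.content

Disabling certificate verification is a deliberate trade-off here: it only affects this image-download client, not the plugin's calls to the LLM API.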
@@ -261,8 +293,18 @@ async def process_messages(group_id: int):
         if state.past_events.__len__() < 1:
             break
 
+        # Convert images in the pending messages to base64
+        base64_images = []
+        if preset.support_image:
+            base64_images = await process_images(event)
+
         # Push the messages the bot missed to the LLM
-        content = ",".join([format_message(ev) for ev in state.past_events])
+        text_content = ",".join([format_message(ev) for ev in state.past_events])
+        content: list[ChatCompletionContentPartTextParam | ChatCompletionContentPartImageParam] = [
+            {"type": "text", "text": text_content}
+        ]
+        for base64_image in base64_images:
+            content.append({"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}})
 
         new_messages: list[ChatCompletionMessageParam] = [
             {"role": "user", "content": content}
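The content list assembled in the last hunk follows the OpenAI chat-completions multimodal format: one text part plus one image_url part per image, with each image embedded as a base64 data URL. A minimal sketch of that message shape against the openai client the plugin already depends on; the model name and client setup are placeholders, not values taken from the plugin's preset config.

import base64

from openai import AsyncOpenAI


async def ask_about_image(image_bytes: bytes, question: str) -> str:
    # Same encoding step that process_images performs on the downloaded bytes
    b64 = base64.b64encode(image_bytes).decode("utf-8")

    client = AsyncOpenAI()  # assumes OPENAI_API_KEY in the environment; the plugin builds its client per preset
    resp = await client.chat.completions.create(
        model="gpt-4o-mini",  # placeholder; llmchat takes the model from the active preset
        messages=[
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": question},
                    {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{b64}"}},
                ],
            }
        ],
    )
    return resp.choices[0].message.content or ""

Only presets whose backing model actually accepts image parts should opt in; the new config flag below is what gates the call to process_images.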
nonebot_plugin_llmchat/config.py (1 change, Normal file → Executable file)
@@ -12,6 +12,7 @@ class PresetConfig(BaseModel):
     temperature: float = Field(0.7, description="Sampling temperature (0-2]")
     proxy: str = Field("", description="HTTP proxy server")
     support_mcp: bool = Field(False, description="Whether MCP is supported")
+    support_image: bool = Field(False, description="Whether image input is supported")
 
 class MCPServerConfig(BaseModel):
     """MCP server configuration"""
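To make the opt-in behaviour concrete, here is a small self-contained sketch of the new field on PresetConfig. Only the fields visible in this diff are reproduced; the real class defines more (API endpoint, key, model, and so on), which are omitted here.

from pydantic import BaseModel, Field


class PresetConfig(BaseModel):
    # Only the fields shown in this diff; the rest of the real preset schema is omitted
    temperature: float = Field(0.7, description="Sampling temperature (0-2]")
    proxy: str = Field("", description="HTTP proxy server")
    support_mcp: bool = Field(False, description="Whether MCP is supported")
    support_image: bool = Field(False, description="Whether image input is supported")  # new in this commit


# Image input stays off unless a preset enables it explicitly.
preset = PresetConfig(support_image=True)
print(preset.support_image)  # True

In process_messages above, process_images is only called when the active preset has support_image set, so text-only presets are unaffected by this change.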