mirror of
https://github.com/FuQuan233/nonebot-plugin-llmchat.git
synced 2025-09-04 10:20:45 +00:00
🐛 filter out think in normal content
This commit is contained in:
parent
65f66be04a
commit
7f284ec4c4
1 changed files with 9 additions and 1 deletions
|
@ -13,6 +13,7 @@ import asyncio
|
|||
from openai import AsyncOpenAI
|
||||
from .config import Config, PresetConfig
|
||||
import time
|
||||
import re
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
|
@ -37,6 +38,10 @@ __plugin_meta__ = PluginMetadata(
|
|||
pluginConfig = get_plugin_config(Config).llmchat
|
||||
driver = get_driver()
|
||||
|
||||
def filter_think(content: str) -> str:
    """Strip `<think>...</think>` reasoning blocks out of *content*.

    The pattern is non-greedy and compiled with ``re.DOTALL`` so that
    multi-line reasoning sections are removed as well. Leading and
    trailing whitespace of the remaining text is trimmed. Text with an
    unclosed ``<think>`` tag is returned unchanged (aside from the
    final strip), matching the original behavior.
    """
    think_pattern = re.compile(r"<think>.*?</think>", re.DOTALL)
    return think_pattern.sub("", content).strip()
|
||||
|
||||
# 初始化群组状态
|
||||
class GroupState:
|
||||
def __init__(self):
|
||||
|
@ -192,7 +197,10 @@ f'''
|
|||
)
|
||||
logger.debug(f"收到API响应 使用token数:{response.usage.total_tokens}")
|
||||
|
||||
reply = response.choices[0].message.content
|
||||
if not state.output_reasoning_content:
|
||||
reply = filter_think(response.choices[0].message.content)
|
||||
else:
|
||||
reply = response.choices[0].message.content
|
||||
|
||||
# 请求成功后再保存历史记录,保证user和assistant穿插,防止R1模型报错
|
||||
state.history.append({"role": "user", "content": content})
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue