Mirror of https://github.com/FuQuan233/nonebot-plugin-llmchat.git (synced 2025-09-04 10:20:45 +00:00)
🐛 Fix reasoning content output condition
commit 78e7ac4e1d
parent 634fad6eed
1 changed file with 2 additions and 1 deletion
@@ -247,7 +247,8 @@ async def process_messages(group_id: int):
         state.history.append({"role": "user", "content": content})
         state.past_events.clear()

-        if not state.output_reasoning_content:
+        reply = ""
+        if state.output_reasoning_content:
             reasoning_content = getattr(
                 response.choices[0].message, "reasoning_content", None
             )
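The old condition was inverted: reasoning content was only looked up when output_reasoning_content was disabled, so enabling the option had no effect. The fix flips the check and also initializes reply above the branch, likely so that reply is defined on both paths. For illustration only, here is a minimal, self-contained sketch of the corrected flow; GroupState, extract_reply, and the fake response object are hypothetical stand-ins for the plugin's real state object and the OpenAI-style ChatCompletion response, not its actual code.

    from dataclasses import dataclass
    from types import SimpleNamespace

    @dataclass
    class GroupState:
        # Stand-in for the plugin's per-group state; the real object has more fields.
        output_reasoning_content: bool = False

    def extract_reply(state: GroupState, response) -> tuple[str, str | None]:
        """Return (reply, reasoning) using the corrected condition from this commit."""
        reply = ""
        reasoning_content = None
        # After the fix, reasoning content is only read when the group has opted in.
        if state.output_reasoning_content:
            reasoning_content = getattr(
                response.choices[0].message, "reasoning_content", None
            )
        # Assumption: the surrounding code goes on to build reply from the model output.
        reply += response.choices[0].message.content or ""
        return reply, reasoning_content

    # Fake response shaped like an OpenAI ChatCompletion, for demonstration only.
    fake = SimpleNamespace(
        choices=[SimpleNamespace(message=SimpleNamespace(
            content="final answer", reasoning_content="chain of thought"))]
    )

    print(extract_reply(GroupState(output_reasoning_content=False), fake))
    # -> ('final answer', None)
    print(extract_reply(GroupState(output_reasoning_content=True), fake))
    # -> ('final answer', 'chain of thought')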