🚨 move tc type into type_checking guard

This commit is contained in:
StarHeartHunt 2025-02-15 16:13:51 +08:00
parent 5fd1c5ff40
commit 620245f420
4 changed files with 54 additions and 8 deletions

32
.editorconfig Normal file
View file

@@ -0,0 +1,32 @@
# http://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
# The JSON files contain newlines inconsistently
[*.json]
insert_final_newline = ignore
# Makefiles always use tabs for indentation
[Makefile]
indent_style = tab
# Batch files use tabs for indentation
[*.bat]
indent_style = tab
[*.md]
trim_trailing_whitespace = false
# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_size = 2
[{*.py,*.pyi}]
indent_size = 4

View file

@@ -1,13 +1,12 @@
import asyncio
from collections import deque
from collections.abc import Iterable
from datetime import datetime
import json
import os
import random
import re
import time
from typing import Optional
from typing import TYPE_CHECKING, Optional
import aiofiles
from nonebot import (
@@ -25,7 +24,6 @@ from nonebot.permission import SUPERUSER
from nonebot.plugin import PluginMetadata
from nonebot.rule import Rule
from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionMessageParam
from .config import Config, PresetConfig
@@ -35,6 +33,11 @@ import nonebot_plugin_localstore as store
require("nonebot_plugin_apscheduler")
from nonebot_plugin_apscheduler import scheduler
if TYPE_CHECKING:
from collections.abc import Iterable
from openai.types.chat import ChatCompletionMessageParam
__plugin_meta__ = PluginMetadata(
name="llmchat",
description="支持多API预设配置的AI群聊插件",

View file

@@ -1,9 +1,9 @@
from pydantic import BaseModel, Field
class PresetConfig(BaseModel):
"""API预设配置"""
name: str = Field(..., description="预设名称(唯一标识)")
api_base: str = Field(..., description="API基础地址")
api_key: str = Field(..., description="API密钥")
@@ -11,15 +11,25 @@ class PresetConfig(BaseModel):
max_tokens: int = Field(2048, description="最大响应token数")
temperature: float = Field(0.7, description="生成温度0-2]")
class ScopedConfig(BaseModel):
"""LLM Chat Plugin配置"""
api_presets: list[PresetConfig] = Field(...,description="API预设列表至少配置1个预设")
api_presets: list[PresetConfig] = Field(
..., description="API预设列表至少配置1个预设"
)
history_size: int = Field(20, description="LLM上下文消息保留数量")
past_events_size : int = Field(10, description="触发回复时发送的群消息数量")
past_events_size: int = Field(10, description="触发回复时发送的群消息数量")
request_timeout: int = Field(30, description="API请求超时时间")
default_preset: str = Field("off", description="默认使用的预设名称")
random_trigger_prob: float = Field(0.05, ge=0.0, le=1.0, description="随机触发概率0-1]")
default_prompt: str = Field("你的回答应该尽量简洁、幽默、可以使用一些语气词、颜文字。你应该拒绝回答任何政治相关的问题。", description="默认提示词")
random_trigger_prob: float = Field(
0.05, ge=0.0, le=1.0, description="随机触发概率0-1]"
)
default_prompt: str = Field(
"你的回答应该尽量简洁、幽默、可以使用一些语气词、颜文字。你应该拒绝回答任何政治相关的问题。",
description="默认提示词",
)
class Config(BaseModel):
llmchat: ScopedConfig

View file

@@ -46,6 +46,7 @@ select = [
"PT", # flake8-pytest-style
"Q", # flake8-quotes
"TID", # flake8-tidy-imports
"TC", # flake8-type-checking
"RUF", # Ruff-specific rules
]
ignore = [