From 8649aaaa5415dafac0d2897529fa56149cfdbf9c Mon Sep 17 00:00:00 2001 From: HibiKier <45528451+HibiKier@users.noreply.github.com> Date: Mon, 14 Jul 2025 22:35:29 +0800 Subject: [PATCH 1/4] =?UTF-8?q?:sparkles:=20=E5=BC=95=E5=85=A5=E7=BC=93?= =?UTF-8?q?=E5=AD=98=E6=9C=BA=E5=88=B6=20(#1889)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 添加全局cache * ✨ 构建缓存,hook使用缓存 * :sparkles: 新增数据库Model方法监控 * :sparkles: 数据库添加semaphore锁 * :adhesive_bandage: 优化webapi返回数据 * :sparkles: 添加增量缓存与缓存过期 * :art: 优化检测代码结构 * :zap: 优化hook权限检测性能 * :bug: 添加新异常判断跳过权限检测 * :sparkles: 添加插件limit缓存 * :art: 代码格式优化 * :bug: 修复代码导入 * :bug: 修复刷新时检查 * :alien: Rename exception for missing database URL in initialization * :wheelchair: Update default database URL to SQLite in configuration * :wrench: Update tortoise-orm and aiocache dependencies restrictions; add optional redis and asyncpg support * :bug: 修复ban检测 * :bug: 修复所有插件关闭时缓存更新 * :bug: 尝试迁移至aiocache * :bug: 完善aiocache缓存 * :zap: 代码性能优化 * :bug: 移除获取封禁缓存时的日志记录 * :bug: 修复缓存类型声明,优化封禁用户处理逻辑 * :bug: 优化LevelUser权限更新逻辑及数据库迁移 * :sparkles: cache支持redis连接 * :rotating_light: auto fix by pre-commit hooks * :zap: :增强获取群组的安全性和准确性。同时,优化了缓存管理中的相关逻辑,确保缓存操作的一致性。 * ✨ feat(auth_limit): 将插件初始化逻辑的启动装饰器更改为优先级管理器 * 🔧 修复日志记录级别 * 🔧 更新数据库连接字符串 * 🔧 更新数据库连接字符串为内存数据库,并优化权限检查逻辑 * ✨ feat(cache): 增加缓存功能配置项,并新增数据访问层以支持缓存逻辑 * :recycle: 重构cache * ✨ feat(cache): 增强缓存管理,新增缓存字典和缓存列表功能,支持过期时间管理 * 🔧 修复Notebook类中的viewport高度设置,将其从1000调整为10 * ✨ 更新插件管理逻辑,替换缓存服务为CacheRoot并优化缓存失效处理 * ✨ 更新RegisterConfig类中的type字段 * ✨ 修复清理重复记录逻辑,确保检查记录的id属性有效性 * :zap: 超级无敌大优化,解决延迟与卡死问题 * ✨ 更新封禁功能,增加封禁时长参数和描述,优化插件信息返回结构 * ✨ 更新zhenxun_help.py中的viewport高度,将其从453调整为10,以优化页面显示效果 * ✨ 优化插件分类逻辑,增加插件ID排序,并更新插件信息返回结构 --------- Co-authored-by: BalconyJH Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .env.dev | 14 +- poetry.lock | 951 +++++++-------- pyproject.toml | 10 +- zhenxun/builtin_plugins/admin/ban/__init__.py | 6 +- 
.../builtin_plugins/admin/ban/_data_source.py | 8 +- .../admin/plugin_switch/_data_source.py | 70 +- .../chat_history/chat_message.py | 34 +- zhenxun/builtin_plugins/help/_utils.py | 4 +- zhenxun/builtin_plugins/help/html_help.py | 11 +- zhenxun/builtin_plugins/help/normal_help.py | 4 +- zhenxun/builtin_plugins/help/zhenxun_help.py | 99 +- .../builtin_plugins/hooks/_auth_checker.py | 597 --------- .../builtin_plugins/hooks/auth/auth_admin.py | 99 ++ .../builtin_plugins/hooks/auth/auth_ban.py | 303 +++++ .../builtin_plugins/hooks/auth/auth_bot.py | 55 + .../builtin_plugins/hooks/auth/auth_cost.py | 41 + .../builtin_plugins/hooks/auth/auth_group.py | 68 ++ .../builtin_plugins/hooks/auth/auth_limit.py | 318 +++++ .../builtin_plugins/hooks/auth/auth_plugin.py | 242 ++++ .../builtin_plugins/hooks/auth/bot_filter.py | 35 + zhenxun/builtin_plugins/hooks/auth/config.py | 16 + .../builtin_plugins/hooks/auth/exception.py | 26 + zhenxun/builtin_plugins/hooks/auth/utils.py | 91 ++ zhenxun/builtin_plugins/hooks/auth_checker.py | 375 ++++++ zhenxun/builtin_plugins/hooks/auth_hook.py | 44 +- zhenxun/builtin_plugins/hooks/ban_hook.py | 84 -- zhenxun/builtin_plugins/hooks/call_hook.py | 8 +- zhenxun/builtin_plugins/init/__init__.py | 12 + zhenxun/builtin_plugins/init/__init_cache.py | 35 + zhenxun/builtin_plugins/init/init_plugin.py | 25 +- zhenxun/builtin_plugins/init/manager.py | 6 +- .../platform/qq/group_handle/data_source.py | 20 +- .../platform/qq_api/ug_watch.py | 37 +- zhenxun/builtin_plugins/scripts.py | 30 - .../statistics/_data_source.py | 4 +- .../builtin_plugins/superuser/group_manage.py | 6 +- .../web_ui/api/tabs/main/data_source.py | 2 +- .../web_ui/api/tabs/manage/data_source.py | 2 +- .../web_ui/api/tabs/system/__init__.py | 1 + zhenxun/configs/config.py | 4 +- zhenxun/configs/utils/models.py | 2 - zhenxun/models/ban_console.py | 46 +- zhenxun/models/bot_console.py | 6 + zhenxun/models/group_console.py | 71 +- zhenxun/models/level_user.py | 9 + 
zhenxun/models/plugin_info.py | 7 +- zhenxun/models/user_console.py | 15 +- zhenxun/services/cache/__init__.py | 1065 +++++++++++++++++ zhenxun/services/cache/cache_containers.py | 452 +++++++ zhenxun/services/cache/config.py | 35 + zhenxun/services/data_access.py | 653 ++++++++++ zhenxun/services/db_context.py | 390 +++++- zhenxun/utils/_image_template.py | 2 +- zhenxun/utils/common_utils.py | 4 +- zhenxun/utils/enum.py | 38 + zhenxun/utils/manager/priority_manager.py | 3 + zhenxun/utils/utils.py | 63 +- 57 files changed, 5179 insertions(+), 1479 deletions(-) delete mode 100644 zhenxun/builtin_plugins/hooks/_auth_checker.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_admin.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_ban.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_bot.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_cost.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_group.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_limit.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/auth_plugin.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/bot_filter.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/config.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/exception.py create mode 100644 zhenxun/builtin_plugins/hooks/auth/utils.py create mode 100644 zhenxun/builtin_plugins/hooks/auth_checker.py delete mode 100644 zhenxun/builtin_plugins/hooks/ban_hook.py create mode 100644 zhenxun/builtin_plugins/init/__init_cache.py delete mode 100644 zhenxun/builtin_plugins/scripts.py create mode 100644 zhenxun/services/cache/__init__.py create mode 100644 zhenxun/services/cache/cache_containers.py create mode 100644 zhenxun/services/cache/config.py create mode 100644 zhenxun/services/data_access.py diff --git a/.env.dev b/.env.dev index 3e1059c2..015a950c 100644 --- a/.env.dev +++ b/.env.dev @@ -27,6 +27,18 @@ QBOT_ID_DATA = '{ # 示例: "sqlite:data/db/zhenxun.db" 
在data目录下建立db文件夹 DB_URL = "" +# NONE: 不使用缓存, MEMORY: 使用内存缓存, REDIS: 使用Redis缓存 +CACHE_MODE = NONE +# REDIS配置,使用REDIS替换Cache内存缓存 +# REDIS地址 +# REDIS_HOST = "127.0.0.1" +# REDIS端口 +# REDIS_PORT = 6379 +# REDIS密码 +# REDIS_PASSWORD = "" +# REDIS过期时间 +# REDIS_EXPIRE = 600 + # 系统代理 # SYSTEM_PROXY = "http://127.0.0.1:7890" @@ -40,7 +52,7 @@ PLATFORM_SUPERUSERS = ' DRIVER=~fastapi+~httpx+~websockets -# LOG_LEVEL=DEBUG +# LOG_LEVEL = DEBUG # 服务器和端口 HOST = 127.0.0.1 PORT = 8080 diff --git a/poetry.lock b/poetry.lock index 21748cd0..d2ddf30d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiocache" @@ -6,12 +6,14 @@ version = "0.12.3" description = "multi backend asyncio cache" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "aiocache-0.12.3-py2.py3-none-any.whl", hash = "sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d"}, {file = "aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713"}, ] +[package.dependencies] +redis = {version = ">=4.2.0", optional = true, markers = "extra == \"redis\""} + [package.extras] memcached = ["aiomcache (>=0.5.2)"] msgpack = ["msgpack (>=0.5.5)"] @@ -28,7 +30,6 @@ version = "23.2.1" description = "File support for asyncio." 
optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"}, {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, @@ -45,7 +46,6 @@ version = "0.17.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"}, {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"}, @@ -65,7 +65,6 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -82,7 +81,6 @@ version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, @@ -96,7 +94,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest 
(>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [package.source] @@ -110,7 +108,6 @@ version = "3.11.0" description = "In-process task scheduler with Cron-like capabilities" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da"}, {file = "apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133"}, @@ -127,7 +124,7 @@ mongodb = ["pymongo (>=3.0)"] redis = ["redis (>=3.0)"] rethinkdb = ["rethinkdb (>=2.4.0)"] sqlalchemy = ["sqlalchemy (>=1.4)"] -test = ["APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]", "PySide6 ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "anyio (>=4.5.2)", "gevent ; python_version < \"3.14\"", "pytest", "pytz", "twisted ; python_version < \"3.14\""] +test = ["APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]", "PySide6", "anyio (>=4.5.2)", "gevent", "pytest", "pytz", "twisted"] tornado = ["tornado (>=4.3)"] twisted = ["twisted"] zookeeper = ["kazoo"] @@ -143,7 +140,6 @@ version = "1.8.36" description = "A High-performance, Generality, Humane Command Line Arguments Parser Library." 
optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "arclet_alconna-1.8.36-py3-none-any.whl", hash = "sha256:05912c6639a07959096ce4c6c3c1730b82343e154ceac72f8676661f1c5594fc"}, {file = "arclet_alconna-1.8.36.tar.gz", hash = "sha256:7d50b12e936ff7db37939921fc47a9bd68b90d8eee3ce0cfbbb5825248f14e70"}, @@ -168,7 +164,6 @@ version = "0.7.10" description = "Builtin Tools for Alconna" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "arclet_alconna_tools-0.7.10-py3-none-any.whl", hash = "sha256:50e8b2f433fbc612dc8b99f4f5410006dcb1ef406c971c795071117a4eab8e20"}, {file = "arclet_alconna_tools-0.7.10.tar.gz", hash = "sha256:446a63a9c56886c23fb44548bb9a18655e0ba5b5dd80cc87915b858dfb02554c"}, @@ -189,7 +184,6 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -214,7 +208,6 @@ version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, @@ -237,7 +230,6 @@ version = "1.4.11" description = "Async client for testing ASGI web applications" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "async-asgi-testclient-1.4.11.tar.gz", hash = "sha256:4449ac85d512d661998ec61f91c9ae01851639611d748d81ae7f816736551792"}, ] @@ -257,8 +249,6 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" -groups = 
["main"] -markers = "python_version == \"3.10\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -273,9 +263,8 @@ reference = "aliyun" name = "asyncpg" version = "0.30.0" description = "An asyncio PostgreSQL driver" -optional = false +optional = true python-versions = ">=3.8.0" -groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -333,8 +322,8 @@ async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""} [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] +gssauth = ["gssapi", "sspilib"] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] [package.source] type = "legacy" @@ -347,19 +336,18 @@ version = "25.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, {file = "attrs-25.3.0.tar.gz", hash = 
"sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", 
"pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [package.source] type = "legacy" @@ -372,7 +360,6 @@ version = "4.13.3" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" -groups = ["main"] files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, @@ -400,7 +387,6 @@ version = "0.2.3.post0" description = "" optional = false python-versions = ">=3.7,<4.0" -groups = ["main"] files = [ {file = "bilireq-0.2.3.post0-py3-none-any.whl", hash = "sha256:8d1f98bb8fb59c0ce1dec226329353ce51e2efaad0a6b4d240437b6132648322"}, {file = "bilireq-0.2.3.post0.tar.gz", hash = "sha256:3185c3952a2becc7d31b0c01a12fda463fa477253504a68f81ea871594887ab4"}, @@ -426,7 +412,6 @@ version = "0.4.4" description = "Ultra-lightweight pure Python package to check if a file is binary or text." 
optional = false python-versions = "*" -groups = ["main"] files = [ {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, @@ -446,7 +431,6 @@ version = "7.4.0" description = "cache tools with async power" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "cashews-7.4.0-py3-none-any.whl", hash = "sha256:e881cc9b4be05ac9ce2c448784bca2864776b1c13ee262658d7c0ebf0d3d257a"}, {file = "cashews-7.4.0.tar.gz", hash = "sha256:c9d22b9b9da567788f232374a5de3b30ceed1e5c24085c96d304b696df0dcbd8"}, @@ -471,7 +455,6 @@ version = "23.2.3" description = "Composable complex class support for attrs and dataclasses." optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, @@ -486,7 +469,7 @@ typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_ver bson = ["pymongo (>=4.4.0)"] cbor2 = ["cbor2 (>=5.4.6)"] msgpack = ["msgpack (>=1.0.5)"] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] +orjson = ["orjson (>=3.9.2)"] pyyaml = ["pyyaml (>=6.0)"] tomlkit = ["tomlkit (>=0.11.8)"] ujson = ["ujson (>=5.7.0)"] @@ -502,7 +485,6 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" -groups = ["main", "dev"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -519,8 +501,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -605,7 +585,6 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -622,7 +601,6 @@ version = "5.2.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, @@ -639,7 +617,6 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -746,7 +723,6 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -766,7 +742,6 @@ version = "0.5.23" description = "Convert Chinese numerals and Arabic numerals." optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "cn2an-0.5.23-py3-none-any.whl", hash = "sha256:b19ab3c53676765c038ccdab51f69b7efa4f0b888139c34088935769241f1cbf"}, {file = "cn2an-0.5.23.tar.gz", hash = "sha256:eda06a63e5eff4a64488d9f22e5f2a4ceca6eaa63416e4f771e67edecb1a5bdb"}, @@ -786,12 +761,10 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} [package.source] type = "legacy" @@ -804,7 +777,6 @@ version = "2.6.0" description = "A command-line utility that creates projects from project templates, e.g. 
creating a Python package project from a Python package project template." optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "cookiecutter-2.6.0-py3-none-any.whl", hash = "sha256:a54a8e37995e4ed963b3e82831072d1ad4b005af736bb17b99c2cbd9d41b6e2d"}, {file = "cookiecutter-2.6.0.tar.gz", hash = "sha256:db21f8169ea4f4fdc2408d48ca44859349de2647fbe494a9d6c3edfc0542c21c"}, @@ -831,7 +803,6 @@ version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, @@ -902,7 +873,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [package.source] type = "legacy" @@ -915,7 +886,6 @@ version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7, !=3.9.0, !=3.9.1" -groups = ["main"] files = [ {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, @@ -958,10 +928,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -978,7 +948,6 @@ version = "1.2.1" description = "Date parsing library designed to parse dates from HTML pages" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "dateparser-1.2.1-py3-none-any.whl", hash = "sha256:bdcac262a467e6260030040748ad7c10d6bacd4f3b9cdb4cfd2251939174508c"}, {file = "dateparser-1.2.1.tar.gz", hash = "sha256:7e4919aeb48481dbfc01ac9683c8e20bfe95bb715a38c1e9f6af889f4f30ccc3"}, @@ -1006,7 +975,6 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" -groups = ["main", "dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = 
"sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1023,7 +991,6 @@ version = "0.19.1" description = "ECDSA cryptographic signature library (pure python)" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -groups = ["main"] files = [ {file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"}, {file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"}, @@ -1047,7 +1014,6 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -1067,7 +1033,6 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1087,7 +1052,6 @@ version = "0.115.12" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, @@ -1113,7 +1077,6 @@ version = 
"6.0.11" description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"}, {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"}, @@ -1133,7 +1096,6 @@ version = "3.18.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, @@ -1142,7 +1104,7 @@ files = [ [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] +typing = ["typing-extensions (>=4.12.2)"] [package.source] type = "legacy" @@ -1155,7 +1117,6 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -1247,7 +1208,6 @@ version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" -groups = ["main"] 
files = [ {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, @@ -1316,7 +1276,6 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1333,7 +1292,6 @@ version = "0.16.3" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, @@ -1360,7 +1318,6 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" -groups = ["main"] files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -1421,7 +1378,6 @@ version = "0.23.3" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, @@ -1434,7 +1390,7 @@ rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1450,7 +1406,6 @@ version = "2.6.9" description = "File identification library for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150"}, {file = "identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf"}, @@ -1470,7 +1425,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1490,7 +1444,6 @@ version = "4.3.2" description = "Image Hashing library" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "ImageHash-4.3.2-py2.py3-none-any.whl", hash = "sha256:02b0f965f8c77cd813f61d7d39031ea27d4780e7ebcad56c6cd6a709acc06e5f"}, {file = "ImageHash-4.3.2.tar.gz", hash = "sha256:e54a79805afb82a34acde4746a16540503a9636fd1ffb31d8e099b29bbbf8156"}, @@ -1513,7 +1466,6 @@ version = "8.6.1" description = "Read 
metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, @@ -1523,12 +1475,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [package.source] @@ -1542,7 +1494,6 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -1559,7 +1510,6 @@ version = "1.1.0" description = "Simple module to parse ISO 8601 dates" optional = false python-versions = ">=3.6.2,<4.0" -groups = ["main"] files = [ {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, @@ -1576,7 
+1526,6 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -1599,7 +1548,6 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5,<4.0" -groups = ["main", "dev"] files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -1610,7 +1558,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" 
and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] +dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] [package.source] type = "legacy" @@ -1619,150 +1567,149 @@ reference = "aliyun" [[package]] name = "lxml" -version = "5.3.1" +version = "5.3.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" -groups = ["main"] files = [ - {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, - {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79"}, - {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2"}, - {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51"}, - {file = 
"lxml-5.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406"}, - {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5"}, - {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0"}, - {file = "lxml-5.3.1-cp310-cp310-win32.whl", hash = "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23"}, - {file = "lxml-5.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c"}, - {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f"}, - {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65"}, - {file = 
"lxml-5.3.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629"}, - {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33"}, - {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295"}, - {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c"}, - {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c"}, - {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff"}, - {file = "lxml-5.3.1-cp311-cp311-win32.whl", hash = "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2"}, - {file = "lxml-5.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6"}, - {file = "lxml-5.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c"}, - {file = "lxml-5.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a"}, - {file = 
"lxml-5.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468"}, - {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367"}, - {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd"}, - {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c"}, - {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f"}, - {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645"}, - {file = "lxml-5.3.1-cp312-cp312-win32.whl", hash = "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5"}, - {file = "lxml-5.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf"}, - {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:c093c7088b40d8266f57ed71d93112bd64c6724d31f0794c1e52cc4857c28e0e"}, - {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0884e3f22d87c30694e625b1e62e6f30d39782c806287450d9dc2fdf07692fd"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1637fa31ec682cd5760092adfabe86d9b718a75d43e65e211d5931809bc111e7"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a364e8e944d92dcbf33b6b494d4e0fb3499dcc3bd9485beb701aa4b4201fa414"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:779e851fd0e19795ccc8a9bb4d705d6baa0ef475329fe44a13cf1e962f18ff1e"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4393600915c308e546dc7003d74371744234e8444a28622d76fe19b98fa59d1"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:673b9d8e780f455091200bba8534d5f4f465944cbdd61f31dc832d70e29064a5"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2e4a570f6a99e96c457f7bec5ad459c9c420ee80b99eb04cbfcfe3fc18ec6423"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:71f31eda4e370f46af42fc9f264fafa1b09f46ba07bdbee98f25689a04b81c20"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:42978a68d3825eaac55399eb37a4d52012a205c0c6262199b8b44fcc6fd686e8"}, - {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8b1942b3e4ed9ed551ed3083a2e6e0772de1e5e3aca872d955e2e86385fb7ff9"}, - {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:85c4f11be9cf08917ac2a5a8b6e1ef63b2f8e3799cec194417e76826e5f1de9c"}, - {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:231cf4d140b22a923b1d0a0a4e0b4f972e5893efcdec188934cc65888fd0227b"}, - {file = 
"lxml-5.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5865b270b420eda7b68928d70bb517ccbe045e53b1a428129bb44372bf3d7dd5"}, - {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dbf7bebc2275016cddf3c997bf8a0f7044160714c64a9b83975670a04e6d2252"}, - {file = "lxml-5.3.1-cp313-cp313-win32.whl", hash = "sha256:d0751528b97d2b19a388b302be2a0ee05817097bab46ff0ed76feeec24951f78"}, - {file = "lxml-5.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:91fb6a43d72b4f8863d21f347a9163eecbf36e76e2f51068d59cd004c506f332"}, - {file = "lxml-5.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:016b96c58e9a4528219bb563acf1aaaa8bc5452e7651004894a973f03b84ba81"}, - {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82a4bb10b0beef1434fb23a09f001ab5ca87895596b4581fd53f1e5145a8934a"}, - {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d68eeef7b4d08a25e51897dac29bcb62aba830e9ac6c4e3297ee7c6a0cf6439"}, - {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:f12582b8d3b4c6be1d298c49cb7ae64a3a73efaf4c2ab4e37db182e3545815ac"}, - {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2df7ed5edeb6bd5590914cd61df76eb6cce9d590ed04ec7c183cf5509f73530d"}, - {file = "lxml-5.3.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:585c4dc429deebc4307187d2b71ebe914843185ae16a4d582ee030e6cfbb4d8a"}, - {file = "lxml-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:06a20d607a86fccab2fc15a77aa445f2bdef7b49ec0520a842c5c5afd8381576"}, - {file = "lxml-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:057e30d0012439bc54ca427a83d458752ccda725c1c161cc283db07bcad43cf9"}, - {file = "lxml-5.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4867361c049761a56bd21de507cab2c2a608c55102311d142ade7dab67b34f32"}, - {file = 
"lxml-5.3.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dddf0fb832486cc1ea71d189cb92eb887826e8deebe128884e15020bb6e3f61"}, - {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bcc211542f7af6f2dfb705f5f8b74e865592778e6cafdfd19c792c244ccce19"}, - {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaca5a812f050ab55426c32177091130b1e49329b3f002a32934cd0245571307"}, - {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:236610b77589faf462337b3305a1be91756c8abc5a45ff7ca8f245a71c5dab70"}, - {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:aed57b541b589fa05ac248f4cb1c46cbb432ab82cbd467d1c4f6a2bdc18aecf9"}, - {file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:75fa3d6946d317ffc7016a6fcc44f42db6d514b7fdb8b4b28cbe058303cb6e53"}, - {file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:96eef5b9f336f623ffc555ab47a775495e7e8846dde88de5f941e2906453a1ce"}, - {file = "lxml-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:ef45f31aec9be01379fc6c10f1d9c677f032f2bac9383c827d44f620e8a88407"}, - {file = "lxml-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0611da6b07dd3720f492db1b463a4d1175b096b49438761cc9f35f0d9eaaef5"}, - {file = "lxml-5.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2aca14c235c7a08558fe0a4786a1a05873a01e86b474dfa8f6df49101853a4e"}, - {file = "lxml-5.3.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae82fce1d964f065c32c9517309f0c7be588772352d2f40b1574a214bd6e6098"}, - {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aae7a3d63b935babfdc6864b31196afd5145878ddd22f5200729006366bc4d5"}, - {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e8e0d177b1fe251c3b1b914ab64135475c5273c8cfd2857964b2e3bb0fe196a7"}, - {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:6c4dd3bfd0c82400060896717dd261137398edb7e524527438c54a8c34f736bf"}, - {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f1208c1c67ec9e151d78aa3435aa9b08a488b53d9cfac9b699f15255a3461ef2"}, - {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c6aacf00d05b38a5069826e50ae72751cb5bc27bdc4d5746203988e429b385bb"}, - {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5881aaa4bf3a2d086c5f20371d3a5856199a0d8ac72dd8d0dbd7a2ecfc26ab73"}, - {file = "lxml-5.3.1-cp38-cp38-win32.whl", hash = "sha256:45fbb70ccbc8683f2fb58bea89498a7274af1d9ec7995e9f4af5604e028233fc"}, - {file = "lxml-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:7512b4d0fc5339d5abbb14d1843f70499cab90d0b864f790e73f780f041615d7"}, - {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5885bc586f1edb48e5d68e7a4b4757b5feb2a496b64f462b4d65950f5af3364f"}, - {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1b92fe86e04f680b848fff594a908edfa72b31bfc3499ef7433790c11d4c8cd8"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091026c3bf7519ab1e64655a3f52a59ad4a4e019a6f830c24d6430695b1cf6a"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ffb141361108e864ab5f1813f66e4e1164181227f9b1f105b042729b6c15125"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3715cdf0dd31b836433af9ee9197af10e3df41d273c19bb249230043667a5dfd"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88b72eb7222d918c967202024812c2bfb4048deeb69ca328363fb8e15254c549"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa59974880ab5ad8ef3afaa26f9bda148c5f39e06b11a8ada4660ecc9fb2feb3"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3bb8149840daf2c3f97cebf00e4ed4a65a0baff888bf2605a8d0135ff5cf764e"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:0d6b2fa86becfa81f0a0271ccb9eb127ad45fb597733a77b92e8a35e53414914"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:136bf638d92848a939fd8f0e06fcf92d9f2e4b57969d94faae27c55f3d85c05b"}, - {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:89934f9f791566e54c1d92cdc8f8fd0009447a5ecdb1ec6b810d5f8c4955f6be"}, - {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8ade0363f776f87f982572c2860cc43c65ace208db49c76df0a21dde4ddd16e"}, - {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfbbab9316330cf81656fed435311386610f78b6c93cc5db4bebbce8dd146675"}, - {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:172d65f7c72a35a6879217bcdb4bb11bc88d55fb4879e7569f55616062d387c2"}, - {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3c623923967f3e5961d272718655946e5322b8d058e094764180cdee7bab1af"}, - {file = "lxml-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ce0930a963ff593e8bb6fda49a503911accc67dee7e5445eec972668e672a0f0"}, - {file = "lxml-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7b64fcd670bca8800bc10ced36620c6bbb321e7bc1214b9c0c0df269c1dddc2"}, - {file = "lxml-5.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725"}, - {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d"}, - {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84"}, - {file = 
"lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2"}, - {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877"}, - {file = "lxml-5.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:05123fad495a429f123307ac6d8fd6f977b71e9a0b6d9aeeb8f80c017cb17131"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a243132767150a44e6a93cd1dde41010036e1cbc63cc3e9fe1712b277d926ce3"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92ea6d9dd84a750b2bae72ff5e8cf5fdd13e58dda79c33e057862c29a8d5b50"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2f1be45d4c15f237209bbf123a0e05b5d630c8717c42f59f31ea9eae2ad89394"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a83d3adea1e0ee36dac34627f78ddd7f093bb9cfc0a8e97f1572a949b695cb98"}, - {file = "lxml-5.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3edbb9c9130bac05d8c3fe150c51c337a471cc7fdb6d2a0a7d3a88e88a829314"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2f23cf50eccb3255b6e913188291af0150d89dab44137a69e14e4dcb7be981f1"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7e5edac4778127f2bf452e0721a58a1cfa4d1d9eac63bdd650535eb8543615"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:094b28ed8a8a072b9e9e2113a81fda668d2053f2ca9f2d202c2c8c7c2d6516b1"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:514fe78fc4b87e7a7601c92492210b20a1b0c6ab20e71e81307d9c2e377c64de"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8fffc08de02071c37865a155e5ea5fce0282e1546fd5bde7f6149fcaa32558ac"}, - {file = "lxml-5.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4b0d5cdba1b655d5b18042ac9c9ff50bda33568eb80feaaca4fc237b9c4fbfde"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3031e4c16b59424e8d78522c69b062d301d951dc55ad8685736c3335a97fc270"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb659702a45136c743bc130760c6f137870d4df3a9e14386478b8a0511abcfca"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a11b16a33656ffc43c92a5343a28dc71eefe460bcc2a4923a96f292692709f6"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5ae125276f254b01daa73e2c103363d3e99e3e10505686ac7d9d2442dd4627a"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76722b5ed4a31ba103e0dc77ab869222ec36efe1a614e42e9bcea88a36186fe"}, - {file = "lxml-5.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:33e06717c00c788ab4e79bc4726ecc50c54b9bfb55355eae21473c145d83c2d2"}, - {file = "lxml-5.3.1.tar.gz", hash = "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8"}, + {file = "lxml-5.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c4b84d6b580a9625dfa47269bf1fd7fbba7ad69e08b16366a46acb005959c395"}, + {file = "lxml-5.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4c08ecb26e4270a62f81f81899dfff91623d349e433b126931c9c4577169666"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef926e9f11e307b5a7c97b17c5c609a93fb59ffa8337afac8f89e6fe54eb0b37"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:017ceeabe739100379fe6ed38b033cd244ce2da4e7f6f07903421f57da3a19a2"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dae97d9435dc90590f119d056d233c33006b2fd235dd990d5564992261ee7ae8"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:910f39425c6798ce63c93976ae5af5fff6949e2cb446acbd44d6d892103eaea8"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9780de781a0d62a7c3680d07963db3048b919fc9e3726d9cfd97296a65ffce1"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1a06b0c6ba2e3ca45a009a78a4eb4d6b63831830c0a83dcdc495c13b9ca97d3e"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:4c62d0a34d1110769a1bbaf77871a4b711a6f59c4846064ccb78bc9735978644"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:8f961a4e82f411b14538fe5efc3e6b953e17f5e809c463f0756a0d0e8039b700"}, + {file = "lxml-5.3.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3dfc78f5f9251b6b8ad37c47d4d0bfe63ceb073a916e5b50a3bf5fd67a703335"}, + {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e690bc03214d3537270c88e492b8612d5e41b884f232df2b069b25b09e6711"}, + {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa837e6ee9534de8d63bc4c1249e83882a7ac22bd24523f83fad68e6ffdf41ae"}, + {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:da4c9223319400b97a2acdfb10926b807e51b69eb7eb80aad4942c0516934858"}, + {file = "lxml-5.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dc0e9bdb3aa4d1de703a437576007d366b54f52c9897cae1a3716bb44fc1fc85"}, + {file = "lxml-5.3.2-cp310-cp310-win32.win32.whl", hash = "sha256:dd755a0a78dd0b2c43f972e7b51a43be518ebc130c9f1a7c4480cf08b4385486"}, + {file = "lxml-5.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:d64ea1686474074b38da13ae218d9fde0d1dc6525266976808f41ac98d9d7980"}, 
+ {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9d61a7d0d208ace43986a92b111e035881c4ed45b1f5b7a270070acae8b0bfb4"}, + {file = "lxml-5.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856dfd7eda0b75c29ac80a31a6411ca12209183e866c33faf46e77ace3ce8a79"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a01679e4aad0727bedd4c9407d4d65978e920f0200107ceeffd4b019bd48529"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6b37b4c3acb8472d191816d4582379f64d81cecbdce1a668601745c963ca5cc"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3df5a54e7b7c31755383f126d3a84e12a4e0333db4679462ef1165d702517477"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c09a40f28dcded933dc16217d6a092be0cc49ae25811d3b8e937c8060647c353"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1ef20f1851ccfbe6c5a04c67ec1ce49da16ba993fdbabdce87a92926e505412"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f79a63289dbaba964eb29ed3c103b7911f2dce28c36fe87c36a114e6bd21d7ad"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:75a72697d95f27ae00e75086aed629f117e816387b74a2f2da6ef382b460b710"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:b9b00c9ee1cc3a76f1f16e94a23c344e0b6e5c10bec7f94cf2d820ce303b8c01"}, + {file = "lxml-5.3.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:77cbcab50cbe8c857c6ba5f37f9a3976499c60eada1bf6d38f88311373d7b4bc"}, + {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29424058f072a24622a0a15357bca63d796954758248a72da6d512f9bd9a4493"}, + {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:7d82737a8afe69a7c80ef31d7626075cc7d6e2267f16bf68af2c764b45ed68ab"}, + {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:95473d1d50a5d9fcdb9321fdc0ca6e1edc164dce4c7da13616247d27f3d21e31"}, + {file = "lxml-5.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2162068f6da83613f8b2a32ca105e37a564afd0d7009b0b25834d47693ce3538"}, + {file = "lxml-5.3.2-cp311-cp311-win32.whl", hash = "sha256:f8695752cf5d639b4e981afe6c99e060621362c416058effd5c704bede9cb5d1"}, + {file = "lxml-5.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:d1a94cbb4ee64af3ab386c2d63d6d9e9cf2e256ac0fd30f33ef0a3c88f575174"}, + {file = "lxml-5.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:16b3897691ec0316a1aa3c6585f61c8b7978475587c5b16fc1d2c28d283dc1b0"}, + {file = "lxml-5.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8d4b34a0eeaf6e73169dcfd653c8d47f25f09d806c010daf074fba2db5e2d3f"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cd7a959396da425022e1e4214895b5cfe7de7035a043bcc2d11303792b67554"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cac5eaeec3549c5df7f8f97a5a6db6963b91639389cdd735d5a806370847732b"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29b5f7d77334877c2146e7bb8b94e4df980325fab0a8af4d524e5d43cd6f789d"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f3495cfec24e3d63fffd342cc8141355d1d26ee766ad388775f5c8c5ec3932"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e70ad4c9658beeff99856926fd3ee5fde8b519b92c693f856007177c36eb2e30"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:507085365783abd7879fa0a6fa55eddf4bdd06591b17a2418403bb3aff8a267d"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = 
"sha256:5bb304f67cbf5dfa07edad904732782cbf693286b9cd85af27059c5779131050"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:3d84f5c093645c21c29a4e972b84cb7cf682f707f8706484a5a0c7ff13d7a988"}, + {file = "lxml-5.3.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:bdc13911db524bd63f37b0103af014b7161427ada41f1b0b3c9b5b5a9c1ca927"}, + {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ec944539543f66ebc060ae180d47e86aca0188bda9cbfadff47d86b0dc057dc"}, + {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:59d437cc8a7f838282df5a199cf26f97ef08f1c0fbec6e84bd6f5cc2b7913f6e"}, + {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e275961adbd32e15672e14e0cc976a982075208224ce06d149c92cb43db5b93"}, + {file = "lxml-5.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:038aeb6937aa404480c2966b7f26f1440a14005cb0702078c173c028eca72c31"}, + {file = "lxml-5.3.2-cp312-cp312-win32.whl", hash = "sha256:3c2c8d0fa3277147bff180e3590be67597e17d365ce94beb2efa3138a2131f71"}, + {file = "lxml-5.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:77809fcd97dfda3f399102db1794f7280737b69830cd5c961ac87b3c5c05662d"}, + {file = "lxml-5.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:77626571fb5270ceb36134765f25b665b896243529eefe840974269b083e090d"}, + {file = "lxml-5.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:78a533375dc7aa16d0da44af3cf6e96035e484c8c6b2b2445541a5d4d3d289ee"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6f62b2404b3f3f0744bbcabb0381c5fe186fa2a9a67ecca3603480f4846c585"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea918da00091194526d40c30c4996971f09dacab032607581f8d8872db34fbf"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c35326f94702a7264aa0eea826a79547d3396a41ae87a70511b9f6e9667ad31c"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3bef90af21d31c4544bc917f51e04f94ae11b43156356aff243cdd84802cbf2"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52fa7ba11a495b7cbce51573c73f638f1dcff7b3ee23697467dc063f75352a69"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad131e2c4d2c3803e736bb69063382334e03648de2a6b8f56a878d700d4b557d"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:00a4463ca409ceacd20490a893a7e08deec7870840eff33dc3093067b559ce3e"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:87e8d78205331cace2b73ac8249294c24ae3cba98220687b5b8ec5971a2267f1"}, + {file = "lxml-5.3.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bf6389133bb255e530a4f2f553f41c4dd795b1fbb6f797aea1eff308f1e11606"}, + {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b3709fc752b42fb6b6ffa2ba0a5b9871646d97d011d8f08f4d5b3ee61c7f3b2b"}, + {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:abc795703d0de5d83943a4badd770fbe3d1ca16ee4ff3783d7caffc252f309ae"}, + {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98050830bb6510159f65d9ad1b8aca27f07c01bb3884ba95f17319ccedc4bcf9"}, + {file = "lxml-5.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6ba465a91acc419c5682f8b06bcc84a424a7aa5c91c220241c6fd31de2a72bc6"}, + {file = "lxml-5.3.2-cp313-cp313-win32.whl", hash = "sha256:56a1d56d60ea1ec940f949d7a309e0bff05243f9bd337f585721605670abb1c1"}, + {file = "lxml-5.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:1a580dc232c33d2ad87d02c8a3069d47abbcdce974b9c9cc82a79ff603065dbe"}, + {file = "lxml-5.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1a59f7fe888d0ec1916d0ad69364c5400cfa2f885ae0576d909f342e94d26bc9"}, + {file = 
"lxml-5.3.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d67b50abc2df68502a26ed2ccea60c1a7054c289fb7fc31c12e5e55e4eec66bd"}, + {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cb08d2cb047c98d6fbbb2e77d6edd132ad6e3fa5aa826ffa9ea0c9b1bc74a84"}, + {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:495ddb7e10911fb4d673d8aa8edd98d1eadafb3b56e8c1b5f427fd33cadc455b"}, + {file = "lxml-5.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:884d9308ac7d581b705a3371185282e1b8eebefd68ccf288e00a2d47f077cc51"}, + {file = "lxml-5.3.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:37f3d7cf7f2dd2520df6cc8a13df4c3e3f913c8e0a1f9a875e44f9e5f98d7fee"}, + {file = "lxml-5.3.2-cp36-cp36m-win32.whl", hash = "sha256:e885a1bf98a76dff0a0648850c3083b99d9358ef91ba8fa307c681e8e0732503"}, + {file = "lxml-5.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:b45f505d0d85f4cdd440cd7500689b8e95110371eaa09da0c0b1103e9a05030f"}, + {file = "lxml-5.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b53cd668facd60b4f0dfcf092e01bbfefd88271b5b4e7b08eca3184dd006cb30"}, + {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5dea998c891f082fe204dec6565dbc2f9304478f2fc97bd4d7a940fec16c873"}, + {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46bc3e58b01e4f38d75e0d7f745a46875b7a282df145aca9d1479c65ff11561"}, + {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:661feadde89159fd5f7d7639a81ccae36eec46974c4a4d5ccce533e2488949c8"}, + {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:43af2a69af2cacc2039024da08a90174e85f3af53483e6b2e3485ced1bf37151"}, + {file = "lxml-5.3.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = 
"sha256:1539f962d82436f3d386eb9f29b2a29bb42b80199c74a695dff51b367a61ec0a"}, + {file = "lxml-5.3.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:6673920bf976421b5fac4f29b937702eef4555ee42329546a5fc68bae6178a48"}, + {file = "lxml-5.3.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9fa722a9cd8845594593cce399a49aa6bfc13b6c83a7ee05e2ab346d9253d52f"}, + {file = "lxml-5.3.2-cp37-cp37m-win32.whl", hash = "sha256:2eadd4efa487f4710755415aed3d6ae9ac8b4327ea45226ffccb239766c8c610"}, + {file = "lxml-5.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:83d8707b1b08cd02c04d3056230ec3b771b18c566ec35e723e60cdf037064e08"}, + {file = "lxml-5.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc6e8678bfa5ccba370103976ccfcf776c85c83da9220ead41ea6fd15d2277b4"}, + {file = "lxml-5.3.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bed509662f67f719119ad56006cd4a38efa68cfa74383060612044915e5f7ad"}, + {file = "lxml-5.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3925975fadd6fd72a6d80541a6ec75dfbad54044a03aa37282dafcb80fbdfa"}, + {file = "lxml-5.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83c0462dedc5213ac586164c6d7227da9d4d578cf45dd7fbab2ac49b63a008eb"}, + {file = "lxml-5.3.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:53e3f9ca72858834688afa17278649d62aa768a4b2018344be00c399c4d29e95"}, + {file = "lxml-5.3.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:32ba634ef3f1b20f781019a91d78599224dc45745dd572f951adbf1c0c9b0d75"}, + {file = "lxml-5.3.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b16504c53f41da5fcf04868a80ac40a39d3eec5329caf761114caec6e844ad1"}, + {file = "lxml-5.3.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1f9682786138549da44ca4c49b20e7144d063b75f2b2ba611f4cff9b83db1062"}, + {file = "lxml-5.3.2-cp38-cp38-win32.whl", hash = "sha256:d8f74ef8aacdf6ee5c07566a597634bb8535f6b53dc89790db43412498cf6026"}, + {file 
= "lxml-5.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:49f1cee0fa27e1ee02589c696a9bdf4027e7427f184fa98e6bef0c6613f6f0fa"}, + {file = "lxml-5.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:741c126bcf9aa939e950e64e5e0a89c8e01eda7a5f5ffdfc67073f2ed849caea"}, + {file = "lxml-5.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ab6e9e6aca1fd7d725ffa132286e70dee5b9a4561c5ed291e836440b82888f89"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58e8c9b9ed3c15c2d96943c14efc324b69be6352fe5585733a7db2bf94d97841"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7811828ddfb8c23f4f1fbf35e7a7b2edec2f2e4c793dee7c52014f28c4b35238"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72968623efb1e12e950cbdcd1d0f28eb14c8535bf4be153f1bfffa818b1cf189"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebfceaa2ea588b54efb6160e3520983663d45aed8a3895bb2031ada080fb5f04"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d685d458505b2bfd2e28c812749fe9194a2b0ce285a83537e4309a187ffa270b"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:334e0e414dab1f5366ead8ca34ec3148415f236d5660e175f1d640b11d645847"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02e56f7de72fa82561eae69628a7d6febd7891d72248c7ff7d3e7814d4031017"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:638d06b4e1d34d1a074fa87deed5fb55c18485fa0dab97abc5604aad84c12031"}, + {file = "lxml-5.3.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:354dab7206d22d7a796fa27c4c5bffddd2393da2ad61835355a4759d435beb47"}, + {file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9d9f82ff2c3bf9bb777cb355149f7f3a98ec58f16b7428369dc27ea89556a4c"}, + {file = 
"lxml-5.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:95ad58340e3b7d2b828efc370d1791856613c5cb62ae267158d96e47b3c978c9"}, + {file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30fe05f4b7f6e9eb32862745512e7cbd021070ad0f289a7f48d14a0d3fc1d8a9"}, + {file = "lxml-5.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34c688fef86f73dbca0798e0a61bada114677006afa524a8ce97d9e5fabf42e6"}, + {file = "lxml-5.3.2-cp39-cp39-win32.whl", hash = "sha256:4d6d3d1436d57f41984920667ec5ef04bcb158f80df89ac4d0d3f775a2ac0c87"}, + {file = "lxml-5.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:2996e1116bbb3ae2a1fbb2ba4da8f92742290b4011e7e5bce2bd33bbc9d9485a"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:521ab9c80b98c30b2d987001c3ede2e647e92eeb2ca02e8cb66ef5122d792b24"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1231b0f9810289d41df1eacc4ebb859c63e4ceee29908a0217403cddce38d0"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271f1a4d5d2b383c36ad8b9b489da5ea9c04eca795a215bae61ed6a57cf083cd"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6fca8a5a13906ba2677a5252752832beb0f483a22f6c86c71a2bb320fba04f61"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ea0c3b7922209160faef194a5b6995bfe7fa05ff7dda6c423ba17646b7b9de10"}, + {file = "lxml-5.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0a006390834603e5952a2ff74b9a31a6007c7cc74282a087aa6467afb4eea987"}, + {file = "lxml-5.3.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eae4136a3b8c4cf76f69461fc8f9410d55d34ea48e1185338848a888d71b9675"}, + {file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d48e06be8d8c58e7feaedd8a37897a6122637efb1637d7ce00ddf5f11f9a92ad"}, + {file = 
"lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4b83aed409134093d90e114007034d2c1ebcd92e501b71fd9ec70e612c8b2eb"}, + {file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7a0e77edfe26d3703f954d46bed52c3ec55f58586f18f4b7f581fc56954f1d84"}, + {file = "lxml-5.3.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:19f6fcfd15b82036b4d235749d78785eb9c991c7812012dc084e0d8853b4c1c0"}, + {file = "lxml-5.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d49919c95d31ee06eefd43d8c6f69a3cc9bdf0a9b979cc234c4071f0eb5cb173"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2d0a60841410123c533990f392819804a8448853f06daf412c0f383443925e89"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7f729e03090eb4e3981f10efaee35e6004b548636b1a062b8b9a525e752abc"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579df6e20d8acce3bcbc9fb8389e6ae00c19562e929753f534ba4c29cfe0be4b"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2abcf3f3b8367d6400b908d00d4cd279fc0b8efa287e9043820525762d383699"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:348c06cb2e3176ce98bee8c397ecc89181681afd13d85870df46167f140a305f"}, + {file = "lxml-5.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:617ecaccd565cbf1ac82ffcaa410e7da5bd3a4b892bb3543fb2fe19bd1c4467d"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c3eb4278dcdb9d86265ed2c20b9ecac45f2d6072e3904542e591e382c87a9c00"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258b6b53458c5cbd2a88795557ff7e0db99f73a96601b70bc039114cd4ee9e02"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c0a9d8d25ed2f2183e8471c97d512a31153e123ac5807f61396158ef2793cb6e"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73bcb635a848c18a3e422ea0ab0092f2e4ef3b02d8ebe87ab49748ebc8ec03d8"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1545de0a69a16ced5767bae8cca1801b842e6e49e96f5e4a8a5acbef023d970b"}, + {file = "lxml-5.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:165fcdc2f40fc0fe88a3c3c06c9c2a097388a90bda6a16e6f7c9199c903c9b8e"}, + {file = "lxml-5.3.2.tar.gz", hash = "sha256:773947d0ed809ddad824b7b14467e1a481b8976e87278ac4a730c2f7c7fcddc1"}, ] [package.extras] @@ -1779,18 +1726,17 @@ reference = "aliyun" [[package]] name = "markdown" -version = "3.7" +version = "3.8" description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.9" files = [ - {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, - {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, + {file = "markdown-3.8-py3-none-any.whl", hash = "sha256:794a929b79c5af141ef5ab0f2f642d0f7b1872981250230e72682346f7cc90dc"}, + {file = "markdown-3.8.tar.gz", hash = "sha256:7df81e63f0df5c4b24b7d156eb81e4690595239b7d70937d0409f1b0de319c6f"}, ] [package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [package.source] @@ -1804,7 +1750,6 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1834,7 +1779,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1910,7 +1854,6 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1927,7 +1870,6 @@ version = "1.1.0" description = "MessagePack serializer" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -2002,104 +1944,115 @@ reference = "aliyun" [[package]] name = "multidict" -version = "6.2.0" +version = "6.4.3" description = "multidict implementation" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"}, - {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"}, - {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"}, - {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"}, - {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"}, - {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"}, - {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"}, - {file = "multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"}, - {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"}, - {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"}, - {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"}, - {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"}, - {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"}, - {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"}, - {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"}, - {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"}, - {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"}, - {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"}, - {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"}, - {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"}, - {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"}, - {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"}, - {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"}, - {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"}, - {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"}, - {file = 
"multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"}, - {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"}, - {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"}, - {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"}, - {file = "multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"}, - {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"}, - {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"}, - {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"}, - {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"}, - {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"}, - {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"}, - {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"}, - {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"}, - {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"}, - {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"}, - {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"}, - {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"}, - {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"}, - {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"}, - {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"}, - {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"}, - {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"}, - {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"}, - {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"}, - {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"}, - {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"}, - {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"}, - {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"}, - {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"}, - {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"}, - {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"}, - {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"}, - {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"}, - {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"}, - {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"}, - {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"}, - {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"}, - {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"}, - {file = 
"multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"}, - {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"}, - {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"}, - {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"}, - {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"}, - {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"}, - {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"}, - {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"}, - {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"}, - {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"}, - {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"}, - {file = "multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"}, - {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"}, - {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"}, - {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"}, - {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"}, - {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"}, - {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"}, - {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"}, - {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"}, - {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"}, - {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"}, - {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"}, - {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"}, - {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"}, - {file = 
"multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"}, - {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"}, - {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"}, - {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = 
"multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = 
"multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = 
"sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = 
"sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [package.dependencies] @@ -2116,7 +2069,6 @@ version = "1.4.2" description = "CLI for nonebot2" optional = false python-versions = "<4.0,>=3.9" -groups = ["main"] files = [ {file = "nb_cli-1.4.2-py3-none-any.whl", hash = "sha256:8348480a988fb8632130e14925977ad117d4a0c76c971f91ad813f91a7592263"}, {file = "nb_cli-1.4.2.tar.gz", hash = "sha256:1d97b2d51569c7f7c7371744b9ed4b73361bc1853111bde2ddf1e990a1e19fef"}, @@ -2149,7 +2101,6 @@ version = "0.7.7" description = "a complex pattern, support typing" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "nepattern-0.7.7-py3-none-any.whl", hash = "sha256:2d66f964333f42df7971390da4fb98dfed1e8b769236f305c28a83c0bcda849a"}, {file = "nepattern-0.7.7.tar.gz", hash = "sha256:6667f888457e78937998f9412eb70ad16d220464d2d77850dd2b05e9ecfb3207"}, @@ 
-2170,7 +2121,6 @@ version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -2187,7 +2137,6 @@ version = "2.4.6" description = "OneBot(CQHTTP) adapter for nonebot2" optional = false python-versions = ">=3.9,<4.0" -groups = ["main"] files = [ {file = "nonebot_adapter_onebot-2.4.6-py3-none-any.whl", hash = "sha256:b1ec7023fd83d731f63b513217327a57d12893a261944934b9195f79173791ad"}, {file = "nonebot_adapter_onebot-2.4.6.tar.gz", hash = "sha256:e33c93649ad11b320d8e9ff213635f29b23b4d0413c9158bd031c513c2f8f701"}, @@ -2210,7 +2159,6 @@ version = "0.54.2" description = "Alconna Adapter for Nonebot" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "nonebot_plugin_alconna-0.54.2-py3-none-any.whl", hash = "sha256:ab9a1a5f0f8c9a30ba57a49bed5d3e9c3f761ea5954cbafb15bcd2aa9c7d5507"}, {file = "nonebot_plugin_alconna-0.54.2.tar.gz", hash = "sha256:0216da3bc2e5f8b4c4c44c2701f8f0a536d35ea0db79e708cc2ecd002b57ace6"}, @@ -2236,7 +2184,6 @@ version = "0.5.0" description = "APScheduler Support for NoneBot2" optional = false python-versions = ">=3.9,<4.0" -groups = ["main"] files = [ {file = "nonebot_plugin_apscheduler-0.5.0-py3-none-any.whl", hash = "sha256:8b99b5ee60c4bc195d4df2fd27dab3d6963691e3332f6cee31a06eb4277c307f"}, {file = "nonebot_plugin_apscheduler-0.5.0.tar.gz", hash = "sha256:6c0230e99765f275dc83d6639ff33bd6f71203fa10cd1b8a204b0f95530cda86"}, @@ -2258,7 +2205,6 @@ version = "0.6.3" description = "通过浏览器渲染图片" optional = false python-versions = "<4.0,>=3.9" -groups = ["main"] files = [ {file = "nonebot_plugin_htmlrender-0.6.3-py3-none-any.whl", hash = 
"sha256:bc9ce830a4652ff1a6501c7e335114921584a9528a8e7f53df2dddee0b2410b5"}, {file = "nonebot_plugin_htmlrender-0.6.3.tar.gz", hash = "sha256:212beb78f776416b0fe5536d799d59c09a39b7d663d3cf815dff5ebcb56cfb45"}, @@ -2285,7 +2231,6 @@ version = "0.2.3" description = "Nonebot2 会话信息提取与会话id定义" optional = false python-versions = ">=3.8,<4.0" -groups = ["main"] files = [ {file = "nonebot_plugin_session-0.2.3-py3-none-any.whl", hash = "sha256:5f652a0c082231c1cea72deb994a81e50f77ba532e14d30fdec09772f69079fd"}, {file = "nonebot_plugin_session-0.2.3.tar.gz", hash = "sha256:33af37400f5005927c4ff861e593774bedc314fba00cfe06f482e582d9f447b7"}, @@ -2306,7 +2251,6 @@ version = "0.7.2" description = "Universal Information Model for Nonebot2" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "nonebot_plugin_uninfo-0.7.2-py3-none-any.whl", hash = "sha256:0fe133b7a0ab1babe740c8bfe64ad365a60a694f6ef08369f9a79666cd744957"}, {file = "nonebot_plugin_uninfo-0.7.2.tar.gz", hash = "sha256:623cfbf81806d8b0314be0b731b74fb3f16b414b9febb52c60643f7117c414a8"}, @@ -2327,7 +2271,6 @@ version = "0.8.1" description = "An alternative for got-and-reject in Nonebot" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "nonebot_plugin_waiter-0.8.1-py3-none-any.whl", hash = "sha256:3e1afc8f134496d3a4ecefd9c3a2a98d6ef28a5318268cb22b99a0ef61a44080"}, {file = "nonebot_plugin_waiter-0.8.1.tar.gz", hash = "sha256:5e54213dfea1fd8a1e20dbe6d93b7881f35cbeedf80005148cdc39c1fd2ccc0f"}, @@ -2350,7 +2293,6 @@ version = "2.4.2" description = "An asynchronous python bot framework." 
optional = false python-versions = ">=3.9,<4.0" -groups = ["main", "dev"] files = [ {file = "nonebot2-2.4.2-py3-none-any.whl", hash = "sha256:ed3e970cdb6c885fb23349b65a045c08cf3ac7f43e28564ae0c72d3671ecda74"}, {file = "nonebot2-2.4.2.tar.gz", hash = "sha256:cf72d5920503ff373ba1d7963f3ddf573db913eb504e3b68ee347efb937db27d"}, @@ -2388,7 +2330,6 @@ version = "0.4.3" description = "nonebot2 test framework" optional = false python-versions = ">=3.9,<4.0" -groups = ["dev"] files = [ {file = "nonebug-0.4.3-py3-none-any.whl", hash = "sha256:eb9b2c8ab3d45459a4f00ebdaae90729e9e9628575c0685fca4c871dd4cfd425"}, {file = "nonebug-0.4.3.tar.gz", hash = "sha256:e9592d2c7a42b76f4a336f98726cba92e1300f6bab155c8822e865919786f10c"}, @@ -2412,7 +2353,6 @@ version = "0.1.9" description = "Prompt toolkit for console interaction" optional = false python-versions = ">=3.8,<4.0" -groups = ["main"] files = [ {file = "noneprompt-0.1.9-py3-none-any.whl", hash = "sha256:a54f1e6a19a3da2dedf7f365f80420e9ae49326a0ffe60a8a9c7afdee6b6eeb3"}, {file = "noneprompt-0.1.9.tar.gz", hash = "sha256:338b8bb89a8d22ef35f1dedb3aa7c1b228cf139973bdc43c5ffc3eef64457db9"}, @@ -2432,7 +2372,6 @@ version = "2.2.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"}, {file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"}, @@ -2502,7 +2441,6 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = 
"sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -2519,7 +2457,6 @@ version = "10.4.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, @@ -2608,7 +2545,7 @@ docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions ; python_version < \"3.10\""] +typing = ["typing-extensions"] xmp = ["defusedxml"] [package.source] @@ -2622,7 +2559,6 @@ version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, @@ -2644,7 +2580,6 @@ version = "1.51.0" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "playwright-1.51.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:bcaaa3d5d73bda659bfb9ff2a288b51e85a91bd89eda86eaf8186550973e416a"}, {file = "playwright-1.51.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e0ae6eb44297b24738e1a6d9c580ca4243b4e21b7e65cf936a71492c08dd0d4"}, @@ -2670,7 +2605,6 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2691,7 +2625,6 @@ version = "4.2.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, @@ -2715,7 +2648,6 @@ version = "0.1.7" description = "text preprocess." 
optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "proces-0.1.7-py3-none-any.whl", hash = "sha256:308325bbc96877263f06e57e5e9c760c4b42cc722887ad60be6b18fc37d68762"}, {file = "proces-0.1.7.tar.gz", hash = "sha256:70a05d9e973dd685f7a9092c58be695a8181a411d63796c213232fd3fdc43775"}, @@ -2732,7 +2664,6 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" -groups = ["main"] files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -2752,7 +2683,6 @@ version = "0.3.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, @@ -2865,7 +2795,6 @@ version = "4.25.6" description = "" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "protobuf-4.25.6-cp310-abi3-win32.whl", hash = "sha256:61df6b5786e2b49fc0055f636c1e8f0aff263808bb724b95b164685ac1bcc13a"}, {file = "protobuf-4.25.6-cp310-abi3-win_amd64.whl", hash = "sha256:b8f837bfb77513fe0e2f263250f423217a173b6d85135be4d81e96a4653bcd3c"}, @@ -2891,7 +2820,6 @@ version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -groups = ["main"] files = [ {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, @@ -2912,7 +2840,7 @@ files = [ ] [package.extras] -test = ["enum34 ; python_version <= \"3.4\"", "ipaddress ; python_version < \"3.0\"", "mock ; python_version < \"3.0\"", "pywin32 ; sys_platform == \"win32\"", "wmi ; sys_platform == \"win32\""] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [package.source] type = "legacy" @@ -2925,7 +2853,6 @@ version = "9.0.0" description = "Get CPU info with pure Python" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -2942,7 +2869,6 @@ version = "0.4.8" description = "ASN.1 types and codecs" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, @@ -2959,8 +2885,6 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main"] -markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -2973,14 +2897,13 
@@ reference = "aliyun" [[package]] name = "pydantic" -version = "2.11.2" +version = "2.11.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "pydantic-2.11.2-py3-none-any.whl", hash = "sha256:7f17d25846bcdf89b670a86cdfe7b29a9f1c9ca23dee154221c9aa81845cfca7"}, - {file = "pydantic-2.11.2.tar.gz", hash = "sha256:2138628e050bd7a1e70b91d4bf4a91167f4ad76fdb83209b107c8d84b854917e"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] @@ -2991,7 +2914,7 @@ typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [package.source] type = "legacy" @@ -3004,7 +2927,6 @@ version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, @@ -3121,7 +3043,6 @@ version = "12.1.1" description = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pyee-12.1.1-py3-none-any.whl", hash = "sha256:18a19c650556bb6b32b406d7f017c8f513aceed1ef7ca618fb65de7bd2d347ef"}, {file = "pyee-12.1.1.tar.gz", hash = "sha256:bbc33c09e2ff827f74191e3e5bbc6be7da02f627b7ec30d86f5ce1a6fb2424a3"}, @@ -3131,7 +3052,7 @@ files = [ typing-extensions = "*" 
[package.extras] -dev = ["black", "build", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio ; python_version >= \"3.4\"", "pytest-trio ; python_version >= \"3.7\"", "sphinx", "toml", "tox", "trio", "trio ; python_version > \"3.6\"", "trio-typing ; python_version > \"3.6\"", "twine", "twisted", "validate-pyproject[all]"] +dev = ["black", "build", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "sphinx", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"] [package.source] type = "legacy" @@ -3144,7 +3065,6 @@ version = "1.0.2" description = "Pure-python FIGlet implementation" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "pyfiglet-1.0.2-py3-none-any.whl", hash = "sha256:889b351d79c99e50a3f619c8f8e6ffdb27fd8c939fc43ecbd7559bd57d5f93ea"}, {file = "pyfiglet-1.0.2.tar.gz", hash = "sha256:758788018ab8faaddc0984e1ea05ff330d3c64be663c513cc1f105f6a3066dab"}, @@ -3161,7 +3081,6 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -3181,7 +3100,6 @@ version = "2.5.0" description = "A pure Python trie data structure implementation." 
optional = false python-versions = "*" -groups = ["main", "dev"] files = [ {file = "pygtrie-2.5.0-py3-none-any.whl", hash = "sha256:8795cda8105493d5ae159a5bef313ff13156c5d4d72feddefacaad59f8c8ce16"}, {file = "pygtrie-2.5.0.tar.gz", hash = "sha256:203514ad826eb403dab1d2e2ddd034e0d1534bbe4dbe0213bb0593f66beba4e2"}, @@ -3198,7 +3116,6 @@ version = "10.14.3" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9"}, {file = "pymdown_extensions-10.14.3.tar.gz", hash = "sha256:41e576ce3f5d650be59e900e4ceff231e0aed2a88cf30acaee41e02f063a061b"}, @@ -3222,7 +3139,6 @@ version = "0.1.6" description = "Forked from pypika and streamline just for tortoise-orm" optional = false python-versions = ">=3.7,<4.0" -groups = ["main"] files = [ {file = "pypika-tortoise-0.1.6.tar.gz", hash = "sha256:d802868f479a708e3263724c7b5719a26ad79399b2a70cea065f4a4cadbebf36"}, {file = "pypika_tortoise-0.1.6-py3-none-any.whl", hash = "sha256:2d68bbb7e377673743cff42aa1059f3a80228d411fbcae591e4465e173109fd8"}, @@ -3239,7 +3155,6 @@ version = "0.51.0" description = "汉字拼音转换模块/工具." 
optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" -groups = ["main"] files = [ {file = "pypinyin-0.51.0-py2.py3-none-any.whl", hash = "sha256:ae8878f08fee15d0c5c11053a737e68a4158c22c63dc632b4de060af5c95bf84"}, {file = "pypinyin-0.51.0.tar.gz", hash = "sha256:cede34fc35a79ef6c799f161e2c280e7b6755ee072fb741cae5ce2a60c4ae0c5"}, @@ -3256,7 +3171,6 @@ version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, @@ -3284,7 +3198,6 @@ version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, @@ -3308,7 +3221,6 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -3332,7 +3244,6 @@ version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -3355,7 +3266,6 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -3381,7 +3291,6 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -3401,7 +3310,6 @@ version = "1.1.0" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = 
"sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, @@ -3421,7 +3329,6 @@ version = "3.4.0" description = "JOSE implementation in Python" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "python-jose-3.4.0.tar.gz", hash = "sha256:9a9a40f418ced8ecaf7e3b28d69887ceaa76adad3bcaa6dae0d9e596fec1d680"}, {file = "python_jose-3.4.0-py2.py3-none-any.whl", hash = "sha256:9c9f616819652d109bd889ecd1e15e9a162b9b94d682534c9c2146092945b78f"}, @@ -3446,14 +3353,13 @@ reference = "aliyun" [[package]] name = "python-markdown-math" -version = "0.8" +version = "0.9" description = "Math extension for Python-Markdown" optional = false -python-versions = ">=3.6" -groups = ["main"] +python-versions = ">=3.9" files = [ - {file = "python-markdown-math-0.8.tar.gz", hash = "sha256:8564212af679fc18d53f38681f16080fcd3d186073f23825c7ce86fadd3e3635"}, - {file = "python_markdown_math-0.8-py3-none-any.whl", hash = "sha256:c685249d84b5b697e9114d7beb352bd8ca2e07fd268fd4057ffca888c14641e5"}, + {file = "python_markdown_math-0.9-py3-none-any.whl", hash = "sha256:ac9932df517a5c0f6d01c56e7a44d065eca4a420893ac45f7a6937c67cb41e86"}, + {file = "python_markdown_math-0.9.tar.gz", hash = "sha256:567395553dc4941e79b3789a1096dcabb3fda9539d150d558ef3507948b264a3"}, ] [package.dependencies] @@ -3470,7 +3376,6 @@ version = "0.0.9" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, @@ -3490,7 +3395,6 @@ version = "8.0.4" description = "A Python slugify application that also handles Unicode" optional = false 
python-versions = ">=3.7" -groups = ["main"] files = [ {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, @@ -3513,7 +3417,6 @@ version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, @@ -3530,7 +3433,6 @@ version = "1.8.0" description = "PyWavelets, wavelet transform module" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "pywavelets-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f5c86fcb203c8e61d1f3d4afbfc08d626c64e4e3708207315577264c724632bf"}, {file = "pywavelets-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafb5fa126277e1690c3d6329287122fc08e4d25a262ce126e3d81b1f5709308"}, @@ -3589,7 +3491,6 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -3651,13 +3552,35 @@ type = "legacy" url = "https://mirrors.aliyun.com/pypi/simple" reference = "aliyun" +[[package]] +name = "redis" +version = "5.2.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.8" +files = [ + {file = "redis-5.2.1-py3-none-any.whl", hash = 
"sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + +[package.extras] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + +[package.source] +type = "legacy" +url = "https://mirrors.aliyun.com/pypi/simple" +reference = "aliyun" + [[package]] name = "regex" version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -3766,7 +3689,6 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -3793,7 +3715,6 @@ version = "0.21.1" description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20"}, {file = "respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af"}, @@ -3813,7 +3734,6 @@ version = "1.3.4" description = "Retrying" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"}, {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"}, @@ -3833,7 +3753,6 @@ version = "1.5.0" description = "Validating URI References per RFC 3986" optional = false python-versions = "*" -groups = ["main", "dev"] files = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, @@ -3856,7 +3775,6 @@ version = "14.0.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" -groups = ["main"] files = [ {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, @@ -3881,7 +3799,6 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" -groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -3901,7 +3818,6 @@ version = "0.18.10" 
description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "ruamel.yaml-0.18.10-py3-none-any.whl", hash = "sha256:30f22513ab2301b3d2b577adc121c6471f28734d3d9728581245f1e76468b4f1"}, {file = "ruamel.yaml-0.18.10.tar.gz", hash = "sha256:20c86ab29ac2153f80a428e1254a8adf686d3383df04490514ca3b79a362db58"}, @@ -3925,8 +3841,6 @@ version = "0.2.12" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false python-versions = ">=3.9" -groups = ["main"] -markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, @@ -3934,6 +3848,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = 
"sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -3942,6 +3857,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -3950,6 +3866,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -3958,6 +3875,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -3966,6 +3884,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -3982,7 +3901,6 @@ version = "0.8.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"}, {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"}, @@ -4015,7 +3933,6 @@ version = "1.15.2" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, {file = "scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, @@ -4071,7 +3988,7 @@ numpy = ">=1.23.5,<2.5" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] doc = ["intersphinx_registry", 
"jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [package.source] type = "legacy" @@ -4084,7 +4001,6 @@ version = "1.0.0" description = "Py3k port of sgmllib." optional = false python-versions = "*" -groups = ["main"] files = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, ] @@ -4100,7 +4016,6 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -4117,7 +4032,6 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main", "dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -4134,7 +4048,6 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -4151,7 +4064,6 @@ version = "0.46.1" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, @@ -4174,7 +4086,6 @@ version = "0.4.15" description = "An Enum that inherits from str." optional = false python-versions = "*" -groups = ["main"] files = [ {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"}, {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"}, @@ -4196,7 +4107,6 @@ version = "0.6.8" description = "A collection of common utils for Arclet" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "tarina-0.6.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2f7b7e61912a020d6ba3c591c4edbc31bb468544640bd814470c69a07dcc4cd"}, {file = "tarina-0.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1cac7cbd49317b8e63eba7d0ce0ba11e1218ab51c9d6ee9df8404b5e226db15b"}, @@ -4294,7 +4204,6 @@ version = "9.1.2" description = "Retry code until it succeeds" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, @@ -4315,7 +4224,6 @@ 
version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, @@ -4332,7 +4240,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -4367,7 +4274,6 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -markers = {main = "python_version == \"3.10\"", dev = "python_full_version <= \"3.11.0a6\""} [package.source] type = "legacy" @@ -4380,7 +4286,6 @@ version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -4397,7 +4302,6 @@ version = "0.20.1" description = "Easy async ORM for python, built with relations in mind" optional = false python-versions = ">=3.8,<4.0" -groups = ["main"] files = [ {file = "tortoise_orm-0.20.1-py3-none-any.whl", hash = "sha256:bf88bc1ba7495a8827565c071efba0a89c4b5f83ff1c16be3c837a4e6b672c21"}, {file = "tortoise_orm-0.20.1.tar.gz", hash = 
"sha256:c896c90a90d1213b822ac0d607b61659ad5fcd5ff72698a8ba2d9efbad9932f3"}, @@ -4405,14 +4309,13 @@ files = [ [package.dependencies] aiosqlite = ">=0.16.0,<0.18.0" -asyncpg = {version = "*", optional = true, markers = "extra == \"asyncpg\""} iso8601 = ">=1.0.2,<2.0.0" pydantic = ">=2.0,<2.7.0 || >2.7.0,<3.0" pypika-tortoise = ">=0.1.6,<0.2.0" pytz = "*" [package.extras] -accel = ["ciso8601 ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "orjson", "uvloop ; sys_platform != \"win32\" and implementation_name == \"cpython\""] +accel = ["ciso8601", "orjson", "uvloop"] aiomysql = ["aiomysql"] asyncmy = ["asyncmy (>=0.2.8,<0.3.0)"] asyncodbc = ["asyncodbc (>=0.1.1,<0.2.0)"] @@ -4430,7 +4333,6 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, @@ -4443,14 +4345,13 @@ reference = "aliyun" [[package]] name = "typing-extensions" -version = "4.13.1" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.13.1-py3-none-any.whl", hash = "sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69"}, - {file = "typing_extensions-4.13.1.tar.gz", hash = "sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [package.source] @@ -4464,7 +4365,6 @@ 
version = "0.4.0" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, @@ -4484,8 +4384,6 @@ version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" -groups = ["main"] -markers = "platform_system == \"Windows\"" files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, @@ -4502,7 +4400,6 @@ version = "5.3.1" description = "tzinfo object for the local timezone" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d"}, {file = "tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd"}, @@ -4525,7 +4422,6 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -4614,18 +4510,17 @@ reference = "aliyun" [[package]] name = "urllib3" -version = "2.3.0" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -4641,7 +4536,6 @@ version = "0.34.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, @@ -4655,12 +4549,12 @@ httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standar python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != 
\"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [package.source] type = "legacy" @@ -4673,8 +4567,6 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" -groups = ["main"] -markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -4731,7 +4623,6 @@ version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" -groups = ["main", "dev"] files = [ {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, @@ -4744,7 +4635,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx 
(>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [package.source] type = "legacy" @@ -4757,7 +4648,6 @@ version = "0.24.0" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, @@ -4858,7 +4748,6 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -4875,7 +4764,6 @@ version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, @@ -4959,15 +4847,13 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" -groups = ["main", "dev"] -markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, ] [package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [package.source] type = "legacy" @@ -4976,100 +4862,104 @@ reference = 
"aliyun" [[package]] name = "yarl" -version = "1.18.3" +version = "1.19.0" description = "Yet another URL library" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] files = [ - {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, - {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, - {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, - {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, - {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, - {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, - {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, - {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, - {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, - {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, - {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, - {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, - {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, 
- {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, - {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, - {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, - {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, - {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, - {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, - {file = 
"yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, - {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, - {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, - {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, - {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, - {file 
= "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, - {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, - {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, - {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, - {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, - {file = 
"yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, - {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, - {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, - {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, - {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, + {file = "yarl-1.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0bae32f8ebd35c04d6528cedb4a26b8bf25339d3616b04613b97347f919b76d3"}, + {file = "yarl-1.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8015a076daf77823e7ebdcba474156587391dab4e70c732822960368c01251e6"}, + {file = "yarl-1.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9973ac95327f5d699eb620286c39365990b240031672b5c436a4cd00539596c5"}, + {file = "yarl-1.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd4b5fbd7b9dde785cfeb486b8cca211a0b138d4f3a7da27db89a25b3c482e5c"}, + {file = "yarl-1.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75460740005de5a912b19f657848aef419387426a40f581b1dc9fac0eb9addb5"}, + {file = "yarl-1.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57abd66ca913f2cfbb51eb3dbbbac3648f1f6983f614a4446e0802e241441d2a"}, + {file = "yarl-1.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46ade37911b7c99ce28a959147cb28bffbd14cea9e7dd91021e06a8d2359a5aa"}, + {file = "yarl-1.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8346ec72ada749a6b5d82bff7be72578eab056ad7ec38c04f668a685abde6af0"}, + {file = 
"yarl-1.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e4cb14a6ee5b6649ccf1c6d648b4da9220e8277d4d4380593c03cc08d8fe937"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:66fc1c2926a73a2fb46e4b92e3a6c03904d9bc3a0b65e01cb7d2b84146a8bd3b"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:5a70201dd1e0a4304849b6445a9891d7210604c27e67da59091d5412bc19e51c"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4807aab1bdeab6ae6f296be46337a260ae4b1f3a8c2fcd373e236b4b2b46efd"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ae584afe81a1de4c1bb06672481050f0d001cad13163e3c019477409f638f9b7"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:30eaf4459df6e91f21b2999d1ee18f891bcd51e3cbe1de301b4858c84385895b"}, + {file = "yarl-1.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0e617d45d03c8dec0dfce6f51f3e1b8a31aa81aaf4a4d1442fdb232bcf0c6d8c"}, + {file = "yarl-1.19.0-cp310-cp310-win32.whl", hash = "sha256:32ba32d0fa23893fd8ea8d05bdb05de6eb19d7f2106787024fd969f4ba5466cb"}, + {file = "yarl-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:545575ecfcd465891b51546c2bcafdde0acd2c62c2097d8d71902050b20e4922"}, + {file = "yarl-1.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:163ff326680de5f6d4966954cf9e3fe1bf980f5fee2255e46e89b8cf0f3418b5"}, + {file = "yarl-1.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a626c4d9cca298d1be8625cff4b17004a9066330ac82d132bbda64a4c17c18d3"}, + {file = "yarl-1.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:961c3e401ea7f13d02b8bb7cb0c709152a632a6e14cdc8119e9c6ee5596cd45d"}, + {file = "yarl-1.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a39d7b807ab58e633ed760f80195cbd145b58ba265436af35f9080f1810dfe64"}, + {file = 
"yarl-1.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4228978fb59c6b10f60124ba8e311c26151e176df364e996f3f8ff8b93971b5"}, + {file = "yarl-1.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba536b17ecf3c74a94239ec1137a3ad3caea8c0e4deb8c8d2ffe847d870a8c5"}, + {file = "yarl-1.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a251e00e445d2e9df7b827c9843c0b87f58a3254aaa3f162fb610747491fe00f"}, + {file = "yarl-1.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9b92431d8b4d4ca5ccbfdbac95b05a3a6cd70cd73aa62f32f9627acfde7549c"}, + {file = "yarl-1.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec2f56edaf476f70b5831bbd59700b53d9dd011b1f77cd4846b5ab5c5eafdb3f"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:acf9b92c4245ac8b59bc7ec66a38d3dcb8d1f97fac934672529562bb824ecadb"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:57711f1465c06fee8825b95c0b83e82991e6d9425f9a042c3c19070a70ac92bf"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:528e86f5b1de0ad8dd758ddef4e0ed24f5d946d4a1cef80ffb2d4fca4e10f122"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3b77173663e075d9e5a57e09d711e9da2f3266be729ecca0b8ae78190990d260"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d8717924cf0a825b62b1a96fc7d28aab7f55a81bf5338b8ef41d7a76ab9223e9"}, + {file = "yarl-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0df9f0221a78d858793f40cbea3915c29f969c11366646a92ca47e080a14f881"}, + {file = "yarl-1.19.0-cp311-cp311-win32.whl", hash = "sha256:8b3ade62678ee2c7c10dcd6be19045135e9badad53108f7d2ed14896ee396045"}, + {file = "yarl-1.19.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:0626ee31edb23ac36bdffe607231de2cca055ad3a5e2dc5da587ef8bc6a321bc"}, + {file = "yarl-1.19.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b687c334da3ff8eab848c9620c47a253d005e78335e9ce0d6868ed7e8fd170b"}, + {file = "yarl-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b0fe766febcf523a2930b819c87bb92407ae1368662c1bc267234e79b20ff894"}, + {file = "yarl-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:742ceffd3c7beeb2b20d47cdb92c513eef83c9ef88c46829f88d5b06be6734ee"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2af682a1e97437382ee0791eacbf540318bd487a942e068e7e0a6c571fadbbd3"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:63702f1a098d0eaaea755e9c9d63172be1acb9e2d4aeb28b187092bcc9ca2d17"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3560dcba3c71ae7382975dc1e912ee76e50b4cd7c34b454ed620d55464f11876"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68972df6a0cc47c8abaf77525a76ee5c5f6ea9bbdb79b9565b3234ded3c5e675"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5684e7ff93ea74e47542232bd132f608df4d449f8968fde6b05aaf9e08a140f9"}, + {file = "yarl-1.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8182ad422bfacdebd4759ce3adc6055c0c79d4740aea1104e05652a81cd868c6"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aee5b90a5a9b71ac57400a7bdd0feaa27c51e8f961decc8d412e720a004a1791"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8c0b2371858d5a814b08542d5d548adb03ff2d7ab32f23160e54e92250961a72"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:cd430c2b7df4ae92498da09e9b12cad5bdbb140d22d138f9e507de1aa3edfea3"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a93208282c0ccdf73065fd76c6c129bd428dba5ff65d338ae7d2ab27169861a0"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:b8179280cdeb4c36eb18d6534a328f9d40da60d2b96ac4a295c5f93e2799e9d9"}, + {file = "yarl-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eda3c2b42dc0c389b7cfda2c4df81c12eeb552019e0de28bde8f913fc3d1fcf3"}, + {file = "yarl-1.19.0-cp312-cp312-win32.whl", hash = "sha256:57f3fed859af367b9ca316ecc05ce79ce327d6466342734305aa5cc380e4d8be"}, + {file = "yarl-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:5507c1f7dd3d41251b67eecba331c8b2157cfd324849879bebf74676ce76aff7"}, + {file = "yarl-1.19.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:59281b9ed27bc410e0793833bcbe7fc149739d56ffa071d1e0fe70536a4f7b61"}, + {file = "yarl-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d27a6482ad5e05e8bafd47bf42866f8a1c0c3345abcb48d4511b3c29ecc197dc"}, + {file = "yarl-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7a8e19fd5a6fdf19a91f2409665c7a089ffe7b9b5394ab33c0eec04cbecdd01f"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cda34ab19099c3a1685ad48fe45172536610c312b993310b5f1ca3eb83453b36"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7908a25d33f94852b479910f9cae6cdb9e2a509894e8d5f416c8342c0253c397"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e66c14d162bac94973e767b24de5d7e6c5153f7305a64ff4fcba701210bcd638"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c03607bf932aa4cfae371e2dc9ca8b76faf031f106dac6a6ff1458418140c165"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9931343d1c1f4e77421687b6b94bbebd8a15a64ab8279adf6fbb047eff47e536"}, + {file = "yarl-1.19.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:262087a8a0d73e1d169d45c2baf968126f93c97cf403e1af23a7d5455d52721f"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:70f384921c24e703d249a6ccdabeb57dd6312b568b504c69e428a8dd3e8e68ca"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:756b9ea5292a2c180d1fe782a377bc4159b3cfefaca7e41b5b0a00328ef62fa9"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cbeb9c145d534c240a63b6ecc8a8dd451faeb67b3dc61d729ec197bb93e29497"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:087ae8f8319848c18e0d114d0f56131a9c017f29200ab1413b0137ad7c83e2ae"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362f5480ba527b6c26ff58cff1f229afe8b7fdd54ee5ffac2ab827c1a75fc71c"}, + {file = "yarl-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f408d4b4315e814e5c3668094e33d885f13c7809cbe831cbdc5b1bb8c7a448f4"}, + {file = "yarl-1.19.0-cp313-cp313-win32.whl", hash = "sha256:24e4c367ad69988a2283dd45ea88172561ca24b2326b9781e164eb46eea68345"}, + {file = "yarl-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:0110f91c57ab43d1538dfa92d61c45e33b84df9257bd08fcfcda90cce931cbc9"}, + {file = "yarl-1.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85ac908cd5a97bbd3048cca9f1bf37b932ea26c3885099444f34b0bf5d5e9fa6"}, + {file = "yarl-1.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6ba0931b559f1345df48a78521c31cfe356585670e8be22af84a33a39f7b9221"}, + {file = "yarl-1.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5bc503e1c1fee1b86bcb58db67c032957a52cae39fe8ddd95441f414ffbab83e"}, + {file = "yarl-1.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d995122dcaf180fd4830a9aa425abddab7c0246107c21ecca2fa085611fa7ce9"}, 
+ {file = "yarl-1.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:217f69e60a14da4eed454a030ea8283f8fbd01a7d6d81e57efb865856822489b"}, + {file = "yarl-1.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad67c8f13a4b79990082f72ef09c078a77de2b39899aabf3960a48069704973"}, + {file = "yarl-1.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dff065a1a8ed051d7e641369ba1ad030d5a707afac54cf4ede7069b959898835"}, + {file = "yarl-1.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada882e26b16ee651ab6544ce956f2f4beaed38261238f67c2a96db748e17741"}, + {file = "yarl-1.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67a56b1acc7093451ea2de0687aa3bd4e58d6b4ef6cbeeaad137b45203deaade"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e97d2f0a06b39e231e59ebab0e6eec45c7683b339e8262299ac952707bdf7688"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a5288adb7c59d0f54e4ad58d86fb06d4b26e08a59ed06d00a1aac978c0e32884"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1efbf4d03e6eddf5da27752e0b67a8e70599053436e9344d0969532baa99df53"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f228f42f29cc87db67020f7d71624102b2c837686e55317b16e1d3ef2747a993"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c515f7dd60ca724e4c62b34aeaa603188964abed2eb66bb8e220f7f104d5a187"}, + {file = "yarl-1.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4815ec6d3d68a96557fa71bd36661b45ac773fb50e5cfa31a7e843edb098f060"}, + {file = "yarl-1.19.0-cp39-cp39-win32.whl", hash = "sha256:9fac2dd1c5ecb921359d9546bc23a6dcc18c6acd50c6d96f118188d68010f497"}, + {file = "yarl-1.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:5864f539ce86b935053bfa18205fa08ce38e9a40ea4d51b19ce923345f0ed5db"}, + 
{file = "yarl-1.19.0-py3-none-any.whl", hash = "sha256:a727101eb27f66727576630d02985d8a065d09cd0b5fcbe38a5793f71b2a97ef"}, + {file = "yarl-1.19.0.tar.gz", hash = "sha256:01e02bb80ae0dbed44273c304095295106e1d9470460e773268a27d11e594892"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -propcache = ">=0.2.0" +propcache = ">=0.2.1" [package.source] type = "legacy" @@ -5082,18 +4972,17 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [package.source] @@ -5101,7 +4990,11 @@ type = "legacy" url = "https://mirrors.aliyun.com/pypi/simple" reference = "aliyun" +[extras] +postgresql = ["asyncpg"] +redis = ["redis"] + [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = "^3.10" -content-hash = "48aa6fabc582a0c75b333f9bd3418264a1fd15a5c8c50220b456ba00d03cd35e" +content-hash = "ed42547d3e975f73e9e1fd1d4c4660d4363d70f40a47c5626cb1664508a9c156" diff --git a/pyproject.toml 
b/pyproject.toml index 621472fe..4dd31f16 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ python = "^3.10" playwright = "^1.41.1" nonebot-adapter-onebot = "^2.3.1" nonebot-plugin-apscheduler = "^0.5" -tortoise-orm = { extras = ["asyncpg"], version = "^0.20.0" } +tortoise-orm = "^0.20.0" cattrs = "^23.2.3" ruamel-yaml = "^0.18.5" strenum = "^0.4.15" @@ -39,7 +39,7 @@ dateparser = "^1.2.0" bilireq = "0.2.3post0" python-jose = { extras = ["cryptography"], version = "^3.3.0" } python-multipart = "^0.0.9" -aiocache = "^0.12.2" +aiocache = {extras = ["redis"], version = "^0.12.3"} py-cpuinfo = "^9.0.0" nonebot-plugin-alconna = "^0.54.0" tenacity = "^9.0.0" @@ -47,6 +47,9 @@ nonebot-plugin-uninfo = ">0.4.1" nonebot-plugin-waiter = "^0.8.1" multidict = ">=6.0.0,!=6.3.2" +redis = { version = ">=5", optional = true } +asyncpg = { version = ">=0.20.0", optional = true } + [tool.poetry.group.dev.dependencies] nonebug = "^0.4" pytest-cov = "^5.0.0" @@ -57,6 +60,9 @@ respx = "^0.21.1" ruff = "^0.8.0" pre-commit = "^4.0.0" +[tool.poetry.extras] +redis = ["redis"] +postgresql = ["asyncpg"] [tool.nonebot] plugins = [ diff --git a/zhenxun/builtin_plugins/admin/ban/__init__.py b/zhenxun/builtin_plugins/admin/ban/__init__.py index 32e97f2d..7c356bcc 100644 --- a/zhenxun/builtin_plugins/admin/ban/__init__.py +++ b/zhenxun/builtin_plugins/admin/ban/__init__.py @@ -87,13 +87,17 @@ __plugin_meta__ = PluginMetadata( smart_tools=[ AICallableTag( name="call_ban", - description="某人多次(至少三次)辱骂你,调用此方法进行封禁", + description="如果你讨厌某个人(好感度过低并让你感到困扰,或者多次辱骂你),调用此方法进行封禁,调用该方法后要告知用户被封禁和原因", parameters=AICallableParam( type="object", properties={ "user_id": AICallableProperties( type="string", description="用户的id" ), + "duration": AICallableProperties( + type="integer", + description="封禁时长(选择的值只能是1-360),单位为分钟,如果频繁触发,按情况增加", + ), }, required=["user_id"], ), diff --git a/zhenxun/builtin_plugins/admin/ban/_data_source.py b/zhenxun/builtin_plugins/admin/ban/_data_source.py index 
ae465bdf..5b67191b 100644 --- a/zhenxun/builtin_plugins/admin/ban/_data_source.py +++ b/zhenxun/builtin_plugins/admin/ban/_data_source.py @@ -9,14 +9,14 @@ from zhenxun.services.log import logger from zhenxun.utils.image_utils import BuildImage, ImageTemplate -async def call_ban(user_id: str): +async def call_ban(user_id: str, duration: int = 1): """调用ban 参数: user_id: 用户id """ - await BanConsole.ban(user_id, None, 9, 60 * 12) - logger.info("辱骂次数过多,已将用户加入黑名单...", "ban", session=user_id) + await BanConsole.ban(user_id, None, 9, duration * 60) + logger.info("被讨厌了,已将用户加入黑名单...", "ban", session=user_id) class BanManage: @@ -114,7 +114,7 @@ class BanManage: if not is_superuser and user_id and session.id1: user_level = await LevelUser.get_user_level(session.id1, group_id) if idx: - ban_data = await BanConsole.get_or_none(id=idx) + ban_data = await BanConsole.get_ban(id=idx) if not ban_data: return False, "该用户/群组不在黑名单中捏..." if ban_data.ban_level > user_level: diff --git a/zhenxun/builtin_plugins/admin/plugin_switch/_data_source.py b/zhenxun/builtin_plugins/admin/plugin_switch/_data_source.py index fb245cf2..a2e51bcb 100644 --- a/zhenxun/builtin_plugins/admin/plugin_switch/_data_source.py +++ b/zhenxun/builtin_plugins/admin/plugin_switch/_data_source.py @@ -1,10 +1,13 @@ import os +from typing import cast from zhenxun.configs.path_config import DATA_PATH, IMAGE_PATH from zhenxun.models.group_console import GroupConsole from zhenxun.models.plugin_info import PluginInfo from zhenxun.models.task_info import TaskInfo -from zhenxun.utils.enum import BlockType, PluginType +from zhenxun.services.cache import CacheRoot +from zhenxun.utils.common_utils import CommonUtils +from zhenxun.utils.enum import BlockType, CacheType, PluginType from zhenxun.utils.exception import GroupInfoNotFound from zhenxun.utils.image_utils import BuildImage, ImageTemplate, RowStyle @@ -116,9 +119,7 @@ async def build_task(group_id: str | None) -> BuildImage: column_name = ["ID", "模块", "名称", "群组状态", 
"全局状态", "运行时间"] group = None if group_id: - group = await GroupConsole.get_or_none( - group_id=group_id, channel_id__isnull=True - ) + group = await GroupConsole.get_group(group_id=group_id) if not group: raise GroupInfoNotFound() else: @@ -200,26 +201,26 @@ class PluginManager: ) return f"成功将所有功能进群默认状态修改为: {'开启' if status else '关闭'}" if group_id: - if group := await GroupConsole.get_or_none( - group_id=group_id, channel_id__isnull=True - ): - module_list = await PluginInfo.filter( - plugin_type=PluginType.NORMAL - ).values_list("module", flat=True) + if group := await GroupConsole.get_group(group_id=group_id): + module_list = cast( + list[str], + await PluginInfo.filter(plugin_type=PluginType.NORMAL).values_list( + "module", flat=True + ), + ) if status: - for module in module_list: - group.block_plugin = group.block_plugin.replace( - f"<{module},", "" - ) + # 开启所有功能 - 清空禁用列表 + group.block_plugin = "" else: - module_list = [f"<{module}" for module in module_list] - group.block_plugin = ",".join(module_list) + "," # type: ignore + # 关闭所有功能 - 将模块列表转换为禁用格式 + group.block_plugin = CommonUtils.convert_module_format(module_list) await group.save(update_fields=["block_plugin"]) return f"成功将此群组所有功能状态修改为: {'开启' if status else '关闭'}" return "获取群组失败..." 
await PluginInfo.filter(plugin_type=PluginType.NORMAL).update( status=status, block_type=None if status else BlockType.ALL ) + await CacheRoot.invalidate_cache(CacheType.PLUGINS) return f"成功将所有功能全局状态修改为: {'开启' if status else '关闭'}" @classmethod @@ -232,9 +233,7 @@ class PluginManager: 返回: bool: 是否醒来 """ - if c := await GroupConsole.get_or_none( - group_id=group_id, channel_id__isnull=True - ): + if c := await GroupConsole.get_group(group_id=group_id): return c.status return False @@ -245,9 +244,11 @@ class PluginManager: 参数: group_id: 群组id """ - await GroupConsole.filter(group_id=group_id, channel_id__isnull=True).update( - status=False + group, _ = await GroupConsole.get_or_create( + group_id=group_id, channel_id__isnull=True ) + group.status = False + await group.save(update_fields=["status"]) @classmethod async def wake(cls, group_id: str): @@ -256,9 +257,11 @@ class PluginManager: 参数: group_id: 群组id """ - await GroupConsole.filter(group_id=group_id, channel_id__isnull=True).update( - status=True + group, _ = await GroupConsole.get_or_create( + group_id=group_id, channel_id__isnull=True ) + group.status = True + await group.save(update_fields=["status"]) @classmethod async def block(cls, module: str): @@ -267,7 +270,9 @@ class PluginManager: 参数: module: 模块名 """ - await PluginInfo.filter(module=module).update(status=False) + if plugin := await PluginInfo.get_plugin(module=module): + plugin.status = False + await plugin.save(update_fields=["status"]) @classmethod async def unblock(cls, module: str): @@ -276,7 +281,9 @@ class PluginManager: 参数: module: 模块名 """ - await PluginInfo.filter(module=module).update(status=True) + if plugin := await PluginInfo.get_plugin(module=module): + plugin.status = True + await plugin.save(update_fields=["status"]) @classmethod async def block_group_plugin(cls, plugin_name: str, group_id: str) -> str: @@ -437,17 +444,18 @@ class PluginManager: """ status_str = "关闭" if status else "开启" if is_all: - modules = await 
TaskInfo.annotate().values_list("module", flat=True) - if modules: + module_list = cast( + list[str], await TaskInfo.annotate().values_list("module", flat=True) + ) + if module_list: group, _ = await GroupConsole.get_or_create( group_id=group_id, channel_id__isnull=True ) - modules = [f"<{module}" for module in modules] if status: - group.block_task = ",".join(modules) + "," # type: ignore + group.block_task = CommonUtils.convert_module_format(module_list) else: - for module in modules: - group.block_task = group.block_task.replace(f"{module},", "") + # 开启所有模块 - 清空禁用列表 + group.block_task = "" await group.save(update_fields=["block_task"]) return f"已成功{status_str}全部被动技能!" elif task := await TaskInfo.get_or_none(name=task_name): diff --git a/zhenxun/builtin_plugins/chat_history/chat_message.py b/zhenxun/builtin_plugins/chat_history/chat_message.py index b3bebb4f..36ea4930 100644 --- a/zhenxun/builtin_plugins/chat_history/chat_message.py +++ b/zhenxun/builtin_plugins/chat_history/chat_message.py @@ -1,13 +1,15 @@ from nonebot import on_message from nonebot.plugin import PluginMetadata from nonebot_plugin_alconna import UniMsg -from nonebot_plugin_session import EventSession +from nonebot_plugin_apscheduler import scheduler +from nonebot_plugin_uninfo import Uninfo from zhenxun.configs.config import Config from zhenxun.configs.utils import PluginExtraData, RegisterConfig from zhenxun.models.chat_history import ChatHistory from zhenxun.services.log import logger from zhenxun.utils.enum import PluginType +from zhenxun.utils.utils import get_entity_ids __plugin_meta__ = PluginMetadata( name="消息存储", @@ -37,18 +39,34 @@ def rule(message: UniMsg) -> bool: chat_history = on_message(rule=rule, priority=1, block=False) +TEMP_LIST = [] + @chat_history.handle() -async def handle_message(message: UniMsg, session: EventSession): - """处理消息存储""" - try: - await ChatHistory.create( - user_id=session.id1, - group_id=session.id2, +async def _(message: UniMsg, session: Uninfo): + entity = 
get_entity_ids(session) + TEMP_LIST.append( + ChatHistory( + user_id=entity.user_id, + group_id=entity.group_id, text=str(message), plain_text=message.extract_plain_text(), - bot_id=session.bot_id, + bot_id=session.self_id, platform=session.platform, ) + ) + + +@scheduler.scheduled_job( + "interval", + minutes=1, +) +async def _(): + try: + message_list = TEMP_LIST.copy() + TEMP_LIST.clear() + if message_list: + await ChatHistory.bulk_create(message_list) + logger.debug(f"批量添加聊天记录 {len(message_list)} 条", "定时任务") except Exception as e: logger.warning("存储聊天记录失败", "chat_history", e=e) diff --git a/zhenxun/builtin_plugins/help/_utils.py b/zhenxun/builtin_plugins/help/_utils.py index 0554fc8d..d17edcda 100644 --- a/zhenxun/builtin_plugins/help/_utils.py +++ b/zhenxun/builtin_plugins/help/_utils.py @@ -45,11 +45,13 @@ async def classify_plugin( """ sort_data = await sort_type() classify: dict[str, list] = {} - group = await GroupConsole.get_or_none(group_id=group_id) if group_id else None + group = await GroupConsole.get_group(group_id=group_id) if group_id else None bot = await BotConsole.get_or_none(bot_id=session.self_id) for menu, value in sort_data.items(): for plugin in value: if not classify.get(menu): classify[menu] = [] classify[menu].append(handle(bot, plugin, group, is_detail)) + for value in classify.values(): + value.sort(key=lambda x: x.id) return classify diff --git a/zhenxun/builtin_plugins/help/html_help.py b/zhenxun/builtin_plugins/help/html_help.py index 7c552a0d..dec0a835 100644 --- a/zhenxun/builtin_plugins/help/html_help.py +++ b/zhenxun/builtin_plugins/help/html_help.py @@ -21,6 +21,8 @@ class Item(BaseModel): """插件名称""" sta: int """插件状态""" + id: int + """插件id""" class PluginList(BaseModel): @@ -80,10 +82,9 @@ def __handle_item( sta = 2 if f"{plugin.module}," in group.block_plugin: sta = 1 - if bot: - if f"{plugin.module}," in bot.block_plugins: - sta = 2 - return Item(plugin_name=plugin.name, sta=sta) + if bot and f"{plugin.module}," in 
bot.block_plugins: + sta = 2 + return Item(plugin_name=plugin.name, sta=sta, id=plugin.id) def build_plugin_data(classify: dict[str, list[Item]]) -> list[dict[str, str]]: @@ -142,7 +143,7 @@ async def build_html_image( template_name="zhenxun_menu.html", templates={"plugin_list": plugin_list}, pages={ - "viewport": {"width": 1903, "height": 975}, + "viewport": {"width": 1903, "height": 10}, "base_url": f"file://{TEMPLATE_PATH}", }, wait=2, diff --git a/zhenxun/builtin_plugins/help/normal_help.py b/zhenxun/builtin_plugins/help/normal_help.py index 0ef9aa89..f381f900 100644 --- a/zhenxun/builtin_plugins/help/normal_help.py +++ b/zhenxun/builtin_plugins/help/normal_help.py @@ -45,7 +45,7 @@ async def build_normal_image(group_id: str | None, is_detail: bool) -> BuildImag color="black" if idx % 2 else "white", ) curr_h = 10 - group = await GroupConsole.get_or_none(group_id=group_id) + group = await GroupConsole.get_group(group_id=group_id) if group_id else None for _, plugin in enumerate(plugin_list): text_color = (255, 255, 255) if idx % 2 else (0, 0, 0) if group and f"{plugin.module}," in group.block_plugin: @@ -80,7 +80,7 @@ async def build_normal_image(group_id: str | None, is_detail: bool) -> BuildImag width, height = 10, 10 for s in [ "目前支持的功能列表:", - "可以通过 ‘帮助 [功能名称或功能Id]’ 来获取对应功能的使用方法", + "可以通过 '帮助 [功能名称或功能Id]' 来获取对应功能的使用方法", ]: text = await BuildImage.build_text_image(s, "HYWenHei-85W.ttf", 24) await result.paste(text, (width, height)) diff --git a/zhenxun/builtin_plugins/help/zhenxun_help.py b/zhenxun/builtin_plugins/help/zhenxun_help.py index b96d3c59..ea04bdc0 100644 --- a/zhenxun/builtin_plugins/help/zhenxun_help.py +++ b/zhenxun/builtin_plugins/help/zhenxun_help.py @@ -20,6 +20,12 @@ class Item(BaseModel): """插件名称""" commands: list[str] """插件命令""" + id: str + """插件id""" + status: bool + """插件状态""" + has_superuser_help: bool + """插件是否拥有超级用户帮助""" def __handle_item( @@ -39,23 +45,36 @@ def __handle_item( 返回: Item: Item """ + status = True + has_superuser_help = 
False + nb_plugin = nonebot.get_plugin_by_module_name(plugin.module_path) + if nb_plugin and nb_plugin.metadata and nb_plugin.metadata.extra: + extra_data = PluginExtraData(**nb_plugin.metadata.extra) + if extra_data.superuser_help: + has_superuser_help = True if not plugin.status: if plugin.block_type == BlockType.ALL: - plugin.name = f"{plugin.name}(不可用)" + status = False elif group and plugin.block_type == BlockType.GROUP: - plugin.name = f"{plugin.name}(不可用)" + status = False elif not group and plugin.block_type == BlockType.PRIVATE: - plugin.name = f"{plugin.name}(不可用)" + status = False elif group and f"{plugin.module}," in group.block_plugin: - plugin.name = f"{plugin.name}(不可用)" + status = False elif bot and f"{plugin.module}," in bot.block_plugins: - plugin.name = f"{plugin.name}(不可用)" + status = False commands = [] nb_plugin = nonebot.get_plugin_by_module_name(plugin.module_path) if is_detail and nb_plugin and nb_plugin.metadata and nb_plugin.metadata.extra: extra_data = PluginExtraData(**nb_plugin.metadata.extra) commands = [cmd.command for cmd in extra_data.commands] - return Item(plugin_name=f"{plugin.id}-{plugin.name}", commands=commands) + return Item( + plugin_name=plugin.name, + commands=commands, + id=str(plugin.id), + status=status, + has_superuser_help=has_superuser_help, + ) def build_plugin_data(classify: dict[str, list[Item]]) -> list[dict[str, str]]: @@ -78,68 +97,10 @@ def build_plugin_data(classify: dict[str, list[Item]]) -> list[dict[str, str]]: } for menu, value in classify.items() ] - plugin_list = build_line_data(plugin_list) - plugin_list.insert( - 0, - build_plugin_line( - menu_key if menu_key not in ["normal", "功能"] else "主要功能", - max_data, - 30, - 100, - True, - ), - ) - return plugin_list - - -def build_plugin_line( - name: str, items: list, left: int, width: int | None = None, is_max: bool = False -) -> dict: - """构造插件行数据 - - 参数: - name: 菜单名称 - items: 插件名称列表 - left: 左边距 - width: 总插件长度. 
- is_max: 是否为最大长度的插件菜单 - - 返回: - dict: 插件数据 - """ - _plugins = [] - width = width or 50 - if len(items) // 2 > 6 or is_max: - width = 100 - plugin_list1 = [] - plugin_list2 = [] - for i in range(len(items)): - if i % 2: - plugin_list1.append(items[i]) - else: - plugin_list2.append(items[i]) - _plugins = [(30, 50, plugin_list1), (0, 50, plugin_list2)] - else: - _plugins = [(left, 100, items)] - return {"name": name, "items": _plugins, "width": width} - - -def build_line_data(plugin_list: list[dict]) -> list[dict]: - """构造插件数据 - - 参数: - plugin_list: 插件列表 - - 返回: - list[dict]: 插件数据 - """ - left = 30 - data = [] + plugin_list.insert(0, {"name": menu_key, "items": max_data}) for plugin in plugin_list: - data.append(build_plugin_line(plugin["name"], plugin["items"], left)) - if len(plugin["items"]) // 2 <= 6: - left = 15 if left == 30 else 30 - return data + plugin["items"].sort(key=lambda x: x.id) + return plugin_list async def build_zhenxun_image( @@ -160,6 +121,7 @@ async def build_zhenxun_image( width = int(637 * 1.5) if is_detail else 637 title_font = int(53 * 1.5) if is_detail else 53 tip_font = int(19 * 1.5) if is_detail else 19 + plugin_count = sum(len(plugin["items"]) for plugin in plugin_list) return await template_to_pic( template_path=str((TEMPLATE_PATH / "ss_menu").absolute()), template_name="main.html", @@ -170,10 +132,11 @@ async def build_zhenxun_image( "width": width, "font_size": (title_font, tip_font), "is_detail": is_detail, + "plugin_count": plugin_count, } }, pages={ - "viewport": {"width": width, "height": 453}, + "viewport": {"width": width, "height": 10}, "base_url": f"file://{TEMPLATE_PATH}", }, wait=2, diff --git a/zhenxun/builtin_plugins/hooks/_auth_checker.py b/zhenxun/builtin_plugins/hooks/_auth_checker.py deleted file mode 100644 index 3a990d89..00000000 --- a/zhenxun/builtin_plugins/hooks/_auth_checker.py +++ /dev/null @@ -1,597 +0,0 @@ -from typing import ClassVar - -from nonebot.adapters import Bot, Event -from 
nonebot.adapters.onebot.v11 import PokeNotifyEvent -from nonebot.exception import IgnoredException -from nonebot.matcher import Matcher -from nonebot_plugin_alconna import At, UniMsg -from nonebot_plugin_session import EventSession -from pydantic import BaseModel -from tortoise.exceptions import IntegrityError - -from zhenxun.configs.config import Config -from zhenxun.models.bot_console import BotConsole -from zhenxun.models.group_console import GroupConsole -from zhenxun.models.level_user import LevelUser -from zhenxun.models.plugin_info import PluginInfo -from zhenxun.models.plugin_limit import PluginLimit -from zhenxun.models.sign_user import SignUser -from zhenxun.models.user_console import UserConsole -from zhenxun.services.log import logger -from zhenxun.utils.enum import ( - BlockType, - GoldHandle, - LimitWatchType, - PluginLimitType, - PluginType, -) -from zhenxun.utils.exception import InsufficientGold -from zhenxun.utils.message import MessageUtils -from zhenxun.utils.utils import CountLimiter, FreqLimiter, UserBlockLimiter - -base_config = Config.get("hook") - - -class Limit(BaseModel): - limit: PluginLimit - limiter: FreqLimiter | UserBlockLimiter | CountLimiter - - class Config: - arbitrary_types_allowed = True - - -class LimitManage: - add_module: ClassVar[list] = [] - - cd_limit: ClassVar[dict[str, Limit]] = {} - block_limit: ClassVar[dict[str, Limit]] = {} - count_limit: ClassVar[dict[str, Limit]] = {} - - @classmethod - def add_limit(cls, limit: PluginLimit): - """添加限制 - - 参数: - limit: PluginLimit - """ - if limit.module not in cls.add_module: - cls.add_module.append(limit.module) - if limit.limit_type == PluginLimitType.BLOCK: - cls.block_limit[limit.module] = Limit( - limit=limit, limiter=UserBlockLimiter() - ) - elif limit.limit_type == PluginLimitType.CD: - cls.cd_limit[limit.module] = Limit( - limit=limit, limiter=FreqLimiter(limit.cd) - ) - elif limit.limit_type == PluginLimitType.COUNT: - cls.count_limit[limit.module] = Limit( - 
limit=limit, limiter=CountLimiter(limit.max_count) - ) - - @classmethod - def unblock( - cls, module: str, user_id: str, group_id: str | None, channel_id: str | None - ): - """解除插件block - - 参数: - module: 模块名 - user_id: 用户id - group_id: 群组id - channel_id: 频道id - """ - if limit_model := cls.block_limit.get(module): - limit = limit_model.limit - limiter: UserBlockLimiter = limit_model.limiter # type: ignore - key_type = user_id - if group_id and limit.watch_type == LimitWatchType.GROUP: - key_type = channel_id or group_id - logger.debug( - f"解除对象: {key_type} 的block限制", - "AuthChecker", - session=user_id, - group_id=group_id, - ) - limiter.set_false(key_type) - - @classmethod - async def check( - cls, - module: str, - user_id: str, - group_id: str | None, - channel_id: str | None, - session: EventSession, - ): - """检测限制 - - 参数: - module: 模块名 - user_id: 用户id - group_id: 群组id - channel_id: 频道id - session: Session - - 异常: - IgnoredException: IgnoredException - """ - if limit_model := cls.cd_limit.get(module): - await cls.__check(limit_model, user_id, group_id, channel_id, session) - if limit_model := cls.block_limit.get(module): - await cls.__check(limit_model, user_id, group_id, channel_id, session) - if limit_model := cls.count_limit.get(module): - await cls.__check(limit_model, user_id, group_id, channel_id, session) - - @classmethod - async def __check( - cls, - limit_model: Limit | None, - user_id: str, - group_id: str | None, - channel_id: str | None, - session: EventSession, - ): - """检测限制 - - 参数: - limit_model: Limit - user_id: 用户id - group_id: 群组id - channel_id: 频道id - session: Session - - 异常: - IgnoredException: IgnoredException - """ - if not limit_model: - return - limit = limit_model.limit - limiter = limit_model.limiter - is_limit = ( - LimitWatchType.ALL - or (group_id and limit.watch_type == LimitWatchType.GROUP) - or (not group_id and limit.watch_type == LimitWatchType.USER) - ) - key_type = user_id - if group_id and limit.watch_type == 
LimitWatchType.GROUP: - key_type = channel_id or group_id - if is_limit and not limiter.check(key_type): - if limit.result: - await MessageUtils.build_message(limit.result).send() - logger.debug( - f"{limit.module}({limit.limit_type}) 正在限制中...", - "AuthChecker", - session=session, - ) - raise IgnoredException(f"{limit.module} 正在限制中...") - else: - logger.debug( - f"开始进行限制 {limit.module}({limit.limit_type})...", - "AuthChecker", - session=user_id, - group_id=group_id, - ) - if isinstance(limiter, FreqLimiter): - limiter.start_cd(key_type) - if isinstance(limiter, UserBlockLimiter): - limiter.set_true(key_type) - if isinstance(limiter, CountLimiter): - limiter.increase(key_type) - - -class IsSuperuserException(Exception): - pass - - -class AuthChecker: - """ - 权限检查 - """ - - def __init__(self): - check_notice_info_cd = Config.get_config("hook", "CHECK_NOTICE_INFO_CD") - if check_notice_info_cd is None or check_notice_info_cd < 0: - raise ValueError("模块: [hook], 配置项: [CHECK_NOTICE_INFO_CD] 为空或小于0") - self._flmt = FreqLimiter(check_notice_info_cd) - self._flmt_g = FreqLimiter(check_notice_info_cd) - self._flmt_s = FreqLimiter(check_notice_info_cd) - self._flmt_c = FreqLimiter(check_notice_info_cd) - - def is_send_limit_message(self, plugin: PluginInfo, sid: str) -> bool: - """是否发送提示消息 - - 参数: - plugin: PluginInfo - - 返回: - bool: 是否发送提示消息 - """ - if not base_config.get("IS_SEND_TIP_MESSAGE"): - return False - if plugin.plugin_type == PluginType.DEPENDANT: - return False - if plugin.ignore_prompt: - return False - return self._flmt_s.check(sid) - - async def auth( - self, - matcher: Matcher, - event: Event, - bot: Bot, - session: EventSession, - message: UniMsg, - ): - """权限检查 - - 参数: - matcher: matcher - bot: bot - session: EventSession - message: UniMsg - """ - is_ignore = False - cost_gold = 0 - user_id = session.id1 - group_id = session.id3 - channel_id = session.id2 - if not group_id: - group_id = channel_id - channel_id = None - if matcher.type == "notice" and not 
isinstance(event, PokeNotifyEvent): - """过滤除poke外的notice""" - return - if user_id and matcher.plugin and (module_path := matcher.plugin.module_name): - try: - user = await UserConsole.get_user(user_id, session.platform) - except IntegrityError as e: - logger.debug( - "重复创建用户,已跳过该次权限...", - "AuthChecker", - session=session, - e=e, - ) - return - if plugin := await PluginInfo.get_or_none(module_path=module_path): - if plugin.plugin_type == PluginType.HIDDEN: - logger.debug( - f"插件: {plugin.name}:{plugin.module} " - "为HIDDEN,已跳过权限检查..." - ) - return - try: - cost_gold = await self.auth_cost(user, plugin, session) - if session.id1 in bot.config.superusers: - if plugin.plugin_type == PluginType.SUPERUSER: - raise IsSuperuserException() - if not plugin.limit_superuser: - cost_gold = 0 - raise IsSuperuserException() - await self.auth_bot(plugin, bot.self_id) - await self.auth_group(plugin, session, message) - await self.auth_admin(plugin, session) - await self.auth_plugin(plugin, session, event) - await self.auth_limit(plugin, session) - except IsSuperuserException: - logger.debug( - "超级用户或被ban跳过权限检测...", "AuthChecker", session=session - ) - except IgnoredException: - is_ignore = True - LimitManage.unblock( - matcher.plugin.name, user_id, group_id, channel_id - ) - except AssertionError as e: - is_ignore = True - logger.debug("消息无法发送", session=session, e=e) - if cost_gold and user_id: - """花费金币""" - try: - await UserConsole.reduce_gold( - user_id, - cost_gold, - GoldHandle.PLUGIN, - matcher.plugin.name if matcher.plugin else "", - session.platform, - ) - except InsufficientGold: - if u := await UserConsole.get_user(user_id): - u.gold = 0 - await u.save(update_fields=["gold"]) - logger.debug( - f"调用功能花费金币: {cost_gold}", "AuthChecker", session=session - ) - if is_ignore: - raise IgnoredException("权限检测 ignore") - - async def auth_bot(self, plugin: PluginInfo, bot_id: str): - """机器人权限 - - 参数: - plugin: PluginInfo - bot_id: bot_id - """ - if not await 
BotConsole.get_bot_status(bot_id): - logger.debug("Bot休眠中阻断权限检测...", "AuthChecker") - raise IgnoredException("BotConsole休眠权限检测 ignore") - if await BotConsole.is_block_plugin(bot_id, plugin.module): - logger.debug( - f"Bot插件 {plugin.name}({plugin.module}) 权限检查结果为关闭...", - "AuthChecker", - ) - raise IgnoredException("BotConsole插件权限检测 ignore") - - async def auth_limit(self, plugin: PluginInfo, session: EventSession): - """插件限制 - - 参数: - plugin: PluginInfo - session: EventSession - """ - user_id = session.id1 - group_id = session.id3 - channel_id = session.id2 - if not group_id: - group_id = channel_id - channel_id = None - if plugin.module not in LimitManage.add_module: - limit_list: list[PluginLimit] = await plugin.plugin_limit.filter( - status=True - ).all() # type: ignore - for limit in limit_list: - LimitManage.add_limit(limit) - if user_id: - await LimitManage.check( - plugin.module, user_id, group_id, channel_id, session - ) - - async def auth_plugin( - self, plugin: PluginInfo, session: EventSession, event: Event - ): - """插件状态 - - 参数: - plugin: PluginInfo - session: EventSession - """ - group_id = session.id3 - channel_id = session.id2 - if not group_id: - group_id = channel_id - channel_id = None - if user_id := session.id1: - if plugin.impression > 0: - sign_user = await SignUser.get_user(user_id) - if float(sign_user.impression) < plugin.impression: - if self.is_send_limit_message(plugin, user_id): - self._flmt_s.start_cd(user_id) - await MessageUtils.build_message( - f"好感度不足哦,当前功能需要好感度: {plugin.impression}," - "请继续签到提升好感度吧!" 
- ).send(reply_to=True) - logger.debug( - f"{plugin.name}({plugin.module}) 用户好感度不足...", - "AuthChecker", - session=session, - ) - raise IgnoredException("好感度不足...") - if group_id: - sid = group_id or user_id - if await GroupConsole.is_superuser_block_plugin( - group_id, plugin.module - ): - """超级用户群组插件状态""" - if self.is_send_limit_message(plugin, sid): - self._flmt_s.start_cd(group_id or user_id) - await MessageUtils.build_message( - "超级管理员禁用了该群此功能..." - ).send(reply_to=True) - logger.debug( - f"{plugin.name}({plugin.module}) 超级管理员禁用了该群此功能...", - "AuthChecker", - session=session, - ) - raise IgnoredException("超级管理员禁用了该群此功能...") - if await GroupConsole.is_normal_block_plugin(group_id, plugin.module): - """群组插件状态""" - if self.is_send_limit_message(plugin, sid): - self._flmt_s.start_cd(group_id or user_id) - await MessageUtils.build_message("该群未开启此功能...").send( - reply_to=True - ) - logger.debug( - f"{plugin.name}({plugin.module}) 未开启此功能...", - "AuthChecker", - session=session, - ) - raise IgnoredException("该群未开启此功能...") - if plugin.block_type == BlockType.GROUP: - """全局群组禁用""" - try: - if self.is_send_limit_message(plugin, sid): - self._flmt_c.start_cd(group_id) - await MessageUtils.build_message( - "该功能在群组中已被禁用..." - ).send(reply_to=True) - except Exception as e: - logger.error( - "auth_plugin 发送消息失败", - "AuthChecker", - session=session, - e=e, - ) - logger.debug( - f"{plugin.name}({plugin.module}) 该插件在群组中已被禁用...", - "AuthChecker", - session=session, - ) - raise IgnoredException("该插件在群组中已被禁用...") - else: - sid = user_id - if plugin.block_type == BlockType.PRIVATE: - """全局私聊禁用""" - try: - if self.is_send_limit_message(plugin, sid): - self._flmt_c.start_cd(user_id) - await MessageUtils.build_message( - "该功能在私聊中已被禁用..." 
- ).send() - except Exception as e: - logger.error( - "auth_admin 发送消息失败", - "AuthChecker", - session=session, - e=e, - ) - logger.debug( - f"{plugin.name}({plugin.module}) 该插件在私聊中已被禁用...", - "AuthChecker", - session=session, - ) - raise IgnoredException("该插件在私聊中已被禁用...") - if not plugin.status and plugin.block_type == BlockType.ALL: - """全局状态""" - if group_id and await GroupConsole.is_super_group(group_id): - raise IsSuperuserException() - logger.debug( - f"{plugin.name}({plugin.module}) 全局未开启此功能...", - "AuthChecker", - session=session, - ) - if self.is_send_limit_message(plugin, sid): - self._flmt_s.start_cd(group_id or user_id) - await MessageUtils.build_message("全局未开启此功能...").send() - raise IgnoredException("全局未开启此功能...") - - async def auth_admin(self, plugin: PluginInfo, session: EventSession): - """管理员命令 个人权限 - - 参数: - plugin: PluginInfo - session: EventSession - """ - user_id = session.id1 - if user_id and plugin.admin_level: - if group_id := session.id3 or session.id2: - if not await LevelUser.check_level( - user_id, group_id, plugin.admin_level - ): - try: - if self._flmt.check(user_id): - self._flmt.start_cd(user_id) - await MessageUtils.build_message( - [ - At(flag="user", target=user_id), - f"你的权限不足喔," - f"该功能需要的权限等级: {plugin.admin_level}", - ] - ).send(reply_to=True) - except Exception as e: - logger.error( - "auth_admin 发送消息失败", - "AuthChecker", - session=session, - e=e, - ) - logger.debug( - f"{plugin.name}({plugin.module}) 管理员权限不足...", - "AuthChecker", - session=session, - ) - raise IgnoredException("管理员权限不足...") - elif not await LevelUser.check_level(user_id, None, plugin.admin_level): - try: - await MessageUtils.build_message( - f"你的权限不足喔,该功能需要的权限等级: {plugin.admin_level}" - ).send() - except Exception as e: - logger.error( - "auth_admin 发送消息失败", "AuthChecker", session=session, e=e - ) - logger.debug( - f"{plugin.name}({plugin.module}) 管理员权限不足...", - "AuthChecker", - session=session, - ) - raise IgnoredException("权限不足") - - async def auth_group( - 
self, plugin: PluginInfo, session: EventSession, message: UniMsg - ): - """群黑名单检测 群总开关检测 - - 参数: - plugin: PluginInfo - session: EventSession - message: UniMsg - """ - if not (group_id := session.id3 or session.id2): - return - text = message.extract_plain_text() - group = await GroupConsole.get_group(group_id) - if not group: - """群不存在""" - logger.debug( - "群组信息不存在...", - "AuthChecker", - session=session, - ) - raise IgnoredException("群不存在") - if group.level < 0: - """群权限小于0""" - logger.debug( - "群黑名单, 群权限-1...", - "AuthChecker", - session=session, - ) - raise IgnoredException("群黑名单") - if not group.status: - """群休眠""" - if text.strip() != "醒来": - logger.debug("群休眠状态...", "AuthChecker", session=session) - raise IgnoredException("群休眠状态") - if plugin.level > group.level: - """插件等级大于群等级""" - logger.debug( - f"{plugin.name}({plugin.module}) 群等级限制.." - f"该功能需要的群等级: {plugin.level}..", - "AuthChecker", - session=session, - ) - raise IgnoredException(f"{plugin.name}({plugin.module}) 群等级限制...") - - async def auth_cost( - self, user: UserConsole, plugin: PluginInfo, session: EventSession - ) -> int: - """检测是否满足金币条件 - - 参数: - user: UserConsole - plugin: PluginInfo - session: EventSession - - 返回: - int: 需要消耗的金币 - """ - if user.gold < plugin.cost_gold: - """插件消耗金币不足""" - try: - await MessageUtils.build_message( - f"金币不足..该功能需要{plugin.cost_gold}金币.." - ).send() - except Exception as e: - logger.error( - "auth_cost 发送消息失败", "AuthChecker", session=session, e=e - ) - logger.debug( - f"{plugin.name}({plugin.module}) 金币限制.." 
# --- zhenxun/builtin_plugins/hooks/auth/auth_admin.py (new module) ---
import asyncio
import time

from nonebot_plugin_alconna import At
from nonebot_plugin_uninfo import Uninfo

from zhenxun.models.level_user import LevelUser
from zhenxun.models.plugin_info import PluginInfo
from zhenxun.services.data_access import DataAccess
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS
from zhenxun.services.log import logger
from zhenxun.utils.utils import get_entity_ids

from .config import LOGGER_COMMAND, WARNING_THRESHOLD
from .exception import SkipPluginException
from .utils import send_message


async def auth_admin(plugin: PluginInfo, session: Uninfo):
    """Admin-command permission check.

    Computes the caller's effective permission level — the maximum of the
    global level and, inside a group, the group-scoped level — and raises
    ``SkipPluginException`` when it is below ``plugin.admin_level``.

    Args:
        plugin: plugin metadata record (``admin_level`` is the threshold)
        session: uninfo session of the triggering event

    Raises:
        SkipPluginException: effective level below ``plugin.admin_level``
    """
    start_time = time.time()

    if not plugin.admin_level:
        return

    try:
        entity = get_entity_ids(session)
        level_dao = DataAccess(LevelUser)

        global_user: LevelUser | None = None
        group_user: LevelUser | None = None

        # Query global permission; in a group additionally query the
        # group-scoped permission. Both run concurrently under one timeout.
        global_user_task = level_dao.safe_get_or_none(
            user_id=session.user.id, group_id__isnull=True
        )
        group_user_task = None
        if entity.group_id:
            group_user_task = level_dao.safe_get_or_none(
                user_id=session.user.id, group_id=entity.group_id
            )

        try:
            results = await asyncio.wait_for(
                asyncio.gather(global_user_task, group_user_task or asyncio.sleep(0)),
                timeout=DB_TIMEOUT_SECONDS,
            )
            global_user = results[0]
            group_user = results[1] if group_user_task else None
        except asyncio.TimeoutError:
            logger.error(f"查询用户权限超时: user_id={session.user.id}", LOGGER_COMMAND)
            # Fail open on timeout instead of blocking the command.
            return

        # Effective level: best of global and group-local permission.
        user_level = global_user.user_level if global_user else 0
        if entity.group_id and group_user:
            user_level = max(user_level, group_user.user_level)

        if user_level < plugin.admin_level:
            # FIX: the previous revision additionally re-checked
            # ``global_user.user_level`` in an ``elif`` branch even when the
            # combined level was sufficient, wrongly rejecting users whose
            # permission was granted at group scope (and being dead code in
            # private chat). A single check on the effective level matches
            # the pre-refactor ``LevelUser.check_level`` behavior.
            if entity.group_id:
                await send_message(
                    session,
                    [
                        At(flag="user", target=session.user.id),
                        f"你的权限不足喔,该功能需要的权限等级: {plugin.admin_level}",
                    ],
                    entity.user_id,
                )
            else:
                await send_message(
                    session,
                    f"你的权限不足喔,该功能需要的权限等级: {plugin.admin_level}",
                )
            raise SkipPluginException(
                f"{plugin.name}({plugin.module}) 管理员权限不足..."
            )
    finally:
        # Surface slow permission checks (> WARNING_THRESHOLD seconds).
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"auth_admin 耗时: {elapsed:.3f}s, plugin={plugin.module}",
                LOGGER_COMMAND,
                session=session,
            )


# --- zhenxun/builtin_plugins/hooks/auth/auth_ban.py (module header) ---
import asyncio  # noqa: F811
import time  # noqa: F811

from nonebot.adapters import Bot
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import At  # noqa: F811
from nonebot_plugin_uninfo import Uninfo  # noqa: F811

from zhenxun.configs.config import Config
from zhenxun.models.ban_console import BanConsole
from zhenxun.models.plugin_info import PluginInfo  # noqa: F811
from zhenxun.services.data_access import DataAccess  # noqa: F811
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS  # noqa: F811
from zhenxun.services.log import logger  # noqa: F811
from zhenxun.utils.enum import PluginType
from zhenxun.utils.utils import EntityIDs, get_entity_ids  # noqa: F811

from .config import LOGGER_COMMAND, WARNING_THRESHOLD  # noqa: F811
from .exception import SkipPluginException  # noqa: F811
from .utils import freq, send_message  # noqa: F811

# Message template shown to banned users (configurable).
Config.add_plugin_config(
    "hook",
    "BAN_RESULT",
    "才不会给你发消息.",
    help="对被ban用户发送的消息",
)
def calculate_ban_time(ban_record: BanConsole | None) -> int:
    """Compute the remaining ban time from a ban record.

    Args:
        ban_record: the ``BanConsole`` row, or ``None``

    Returns:
        int: remaining ban duration in seconds; ``-1`` means a permanent
        ban, ``0`` means not banned or the ban already expired
    """
    if not ban_record:
        return 0

    if ban_record.duration == -1:
        return -1

    # Positive remainder means the ban window has already elapsed.
    _time = time.time() - (ban_record.ban_time + ban_record.duration)
    return 0 if _time > 0 else int(abs(_time))


async def is_ban(user_id: str | None, group_id: str | None) -> int:
    """Check whether a user (optionally scoped to a group) is banned.

    Args:
        user_id: user id
        group_id: group id

    Returns:
        int: remaining ban time; ``-1`` permanent, ``0`` not banned
    """
    if not user_id and not group_id:
        return 0

    start_time = time.time()
    ban_dao = DataAccess(BanConsole)

    group_user = None
    user = None

    try:
        # Query the group-scoped and global ban records concurrently.
        # NOTE(review): when called with user_id=None (see group_handle),
        # no task is created and this function always returns 0, so the
        # group-blacklist path appears dead — confirm the intended query
        # for group-only ban records.
        tasks = []
        if user_id and group_id:
            tasks.append(ban_dao.safe_get_or_none(user_id=user_id, group_id=group_id))
        if user_id:
            tasks.append(
                ban_dao.safe_get_or_none(user_id=user_id, group_id__isnull=True)
            )

        if tasks:
            try:
                ban_records = await asyncio.wait_for(
                    asyncio.gather(*tasks), timeout=DB_TIMEOUT_SECONDS
                )
                if len(tasks) == 2:
                    group_user, user = ban_records
                elif user_id and group_id:
                    group_user = ban_records[0]
                else:
                    user = ban_records[0]
            except asyncio.TimeoutError:
                logger.error(
                    f"查询ban记录超时: user_id={user_id}, group_id={group_id}",
                    LOGGER_COMMAND,
                )
                # Fail open on timeout instead of blocking the event.
                return 0

        results = [record for record in (group_user, user) if record]
        if not results:
            return 0

        logger.debug(f"查询到的ban记录: {results}", LOGGER_COMMAND)
        # Pick the strictest ban: a permanent ban (-1) always wins,
        # otherwise the longest remaining time.
        # FIX: previously `if ban_time == -1 or ban_time > max_ban_time`
        # allowed a later finite ban to overwrite an already-found
        # permanent (-1) ban, silently shortening it.
        max_ban_time: int = 0
        for result in results:
            if result.duration > 0 or result.duration == -1:
                ban_time = calculate_ban_time(result)
                if ban_time == -1:
                    return -1
                if ban_time > max_ban_time:
                    max_ban_time = ban_time

        return max_ban_time
    finally:
        # Surface slow checks (> WARNING_THRESHOLD seconds).
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"is_ban 耗时: {elapsed:.3f}s",
                LOGGER_COMMAND,
                session=user_id,
                group_id=group_id,
            )


def check_plugin_type(matcher: Matcher) -> bool:
    """Decide whether this matcher's plugin takes part in ban checks.

    Args:
        matcher: the current matcher

    Returns:
        bool: ``False`` for hidden plugins (skip the check), ``True`` otherwise
    """
    if plugin := matcher.plugin:
        if metadata := plugin.metadata:
            # Hidden plugins are exempt from ban handling.
            if metadata.extra.get("plugin_type") == PluginType.HIDDEN:
                return False
    return True


def format_time(time_val: float) -> str:
    """Format a ban duration for display.

    Args:
        time_val: ban duration in seconds; ``-1`` means permanent

    Returns:
        str: human-readable duration ("∞" for a permanent ban)
    """
    if time_val == -1:
        return "∞"
    time_val = abs(int(time_val))
    if time_val < 60:
        time_str = f"{time_val!s} 秒"
    else:
        minute = int(time_val / 60)
        if minute > 60:
            hours = minute // 60
            minute %= 60
            time_str = f"{hours} 小时 {minute}分钟"
        else:
            time_str = f"{minute} 分钟"
    return time_str


async def group_handle(group_id: str) -> None:
    """Group-level ban check.

    Args:
        group_id: group id

    Raises:
        SkipPluginException: the group is blacklisted
    """
    start_time = time.time()
    try:
        if await is_ban(None, group_id):
            raise SkipPluginException("群组处于黑名单中...")
    finally:
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"group_handle 耗时: {elapsed:.3f}s",
                LOGGER_COMMAND,
                group_id=group_id,
            )


async def user_handle(module: str, entity: EntityIDs, session: Uninfo) -> None:
    """User-level ban check.

    Args:
        module: plugin module name
        entity: resolved entity ids
        session: uninfo session

    Raises:
        SkipPluginException: the user is banned
    """
    start_time = time.time()
    try:
        ban_result = Config.get_config("hook", "BAN_RESULT")
        time_val = await is_ban(entity.user_id, entity.group_id)
        if not time_val:
            return
        time_str = format_time(time_val)
        plugin_dao = DataAccess(PluginInfo)
        try:
            db_plugin = await asyncio.wait_for(
                plugin_dao.safe_get_or_none(module=module), timeout=DB_TIMEOUT_SECONDS
            )
        except asyncio.TimeoutError:
            logger.error(f"查询插件信息超时: {module}", LOGGER_COMMAND)
            # The user is known to be banned at this point, so reject even
            # though the plugin lookup (only used for the prompt) timed out.
            raise SkipPluginException("用户处于黑名单中...")

        # Only prompt for finite bans on plugins that want prompts, and
        # respect the prompt-frequency limiter.
        if (
            db_plugin
            and not db_plugin.ignore_prompt
            and time_val != -1
            and ban_result
            and freq.is_send_limit_message(db_plugin, entity.user_id, False)
        ):
            try:
                await asyncio.wait_for(
                    send_message(
                        session,
                        [
                            At(flag="user", target=entity.user_id),
                            f"{ban_result}\n在..在 {time_str} 后才会理你喔",
                        ],
                        entity.user_id,
                    ),
                    timeout=DB_TIMEOUT_SECONDS,
                )
            except asyncio.TimeoutError:
                logger.error(f"发送消息超时: {entity.user_id}", LOGGER_COMMAND)
        raise SkipPluginException("用户处于黑名单中...")
    finally:
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"user_handle 耗时: {elapsed:.3f}s",
                LOGGER_COMMAND,
                session=session,
            )


async def auth_ban(matcher: Matcher, bot: Bot, session: Uninfo) -> None:
    """Entry point of the ban check (group first, then user).

    Args:
        matcher: the current matcher
        bot: the current bot (superusers bypass the check)
        session: uninfo session
    """
    start_time = time.time()
    try:
        if not check_plugin_type(matcher):
            return
        if not matcher.plugin_name:
            return
        entity = get_entity_ids(session)
        if entity.user_id in bot.config.superusers:
            return
        if entity.group_id:
            try:
                await asyncio.wait_for(
                    group_handle(entity.group_id), timeout=DB_TIMEOUT_SECONDS
                )
            except asyncio.TimeoutError:
                logger.error(f"群组ban检查超时: {entity.group_id}", LOGGER_COMMAND)
                # Fail open on timeout.

        if entity.user_id:
            try:
                await asyncio.wait_for(
                    user_handle(matcher.plugin_name, entity, session),
                    timeout=DB_TIMEOUT_SECONDS,
                )
            except asyncio.TimeoutError:
                logger.error(f"用户ban检查超时: {entity.user_id}", LOGGER_COMMAND)
                # Fail open on timeout.
    finally:
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"auth_ban 总耗时: {elapsed:.3f}s, plugin={matcher.plugin_name}",
                LOGGER_COMMAND,
                session=session,
            )
# --- zhenxun/builtin_plugins/hooks/auth/auth_bot.py (new module) ---
import asyncio
import time

from zhenxun.models.bot_console import BotConsole
from zhenxun.models.plugin_info import PluginInfo
from zhenxun.services.data_access import DataAccess
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS
from zhenxun.services.log import logger
from zhenxun.utils.common_utils import CommonUtils

from .config import LOGGER_COMMAND, WARNING_THRESHOLD
from .exception import SkipPluginException


async def auth_bot(plugin: PluginInfo, bot_id: str):
    """Bot-level permission check.

    Rejects the event when the bot record is missing/sleeping or the
    plugin is in the bot's block list.

    Args:
        plugin: plugin metadata record
        bot_id: id of the receiving bot

    Raises:
        SkipPluginException: the bot is unavailable or blocks this plugin
    """
    started_at = time.time()

    try:
        # Fetch the bot record via the cached data-access layer.
        bot_dao = DataAccess(BotConsole)

        try:
            bot_record: BotConsole | None = await asyncio.wait_for(
                bot_dao.safe_get_or_none(bot_id=bot_id), timeout=DB_TIMEOUT_SECONDS
            )
        except asyncio.TimeoutError:
            logger.error(f"查询Bot信息超时: bot_id={bot_id}", LOGGER_COMMAND)
            # Fail open on timeout.
            return

        if not bot_record or not bot_record.status:
            raise SkipPluginException("Bot不存在或休眠中阻断权限检测...")
        if CommonUtils.format(plugin.module) in bot_record.block_plugins:
            raise SkipPluginException(
                f"Bot插件 {plugin.name}({plugin.module}) 权限检查结果为关闭..."
            )
    finally:
        # Surface slow checks (> WARNING_THRESHOLD seconds).
        cost = time.time() - started_at
        if cost > WARNING_THRESHOLD:
            logger.warning(
                f"auth_bot 耗时: {cost:.3f}s, "
                f"bot_id={bot_id}, plugin={plugin.module}",
                LOGGER_COMMAND,
            )


# --- zhenxun/builtin_plugins/hooks/auth/auth_cost.py (new module) ---
import time  # noqa: F811

from nonebot_plugin_uninfo import Uninfo

from zhenxun.models.plugin_info import PluginInfo  # noqa: F811
from zhenxun.models.user_console import UserConsole
from zhenxun.services.log import logger  # noqa: F811

from .config import LOGGER_COMMAND, WARNING_THRESHOLD  # noqa: F811
from .exception import SkipPluginException  # noqa: F811
from .utils import send_message


async def auth_cost(user: UserConsole, plugin: PluginInfo, session: Uninfo) -> int:
    """Gold-cost check.

    Args:
        user: caller's console record (holds the gold balance)
        plugin: plugin metadata record (holds ``cost_gold``)
        session: uninfo session

    Returns:
        int: the amount of gold this invocation will consume

    Raises:
        SkipPluginException: the caller cannot afford the plugin
    """
    started_at = time.time()

    try:
        if user.gold < plugin.cost_gold:
            # Not enough gold — tell the user and skip the plugin.
            await send_message(session, f"金币不足..该功能需要{plugin.cost_gold}金币..")
            raise SkipPluginException(f"{plugin.name}({plugin.module}) 金币限制...")
        return plugin.cost_gold
    finally:
        # Surface slow checks (> WARNING_THRESHOLD seconds).
        cost = time.time() - started_at
        if cost > WARNING_THRESHOLD:
            logger.warning(
                f"auth_cost 耗时: {cost:.3f}s, plugin={plugin.module}",
                LOGGER_COMMAND,
                session=session,
            )
# --- zhenxun/builtin_plugins/hooks/auth/auth_group.py (new module) ---
import asyncio
import time

from nonebot_plugin_alconna import UniMsg

from zhenxun.models.group_console import GroupConsole
from zhenxun.models.plugin_info import PluginInfo
from zhenxun.services.data_access import DataAccess
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS
from zhenxun.services.log import logger
from zhenxun.utils.utils import EntityIDs

from .config import LOGGER_COMMAND, WARNING_THRESHOLD, SwitchEnum
from .exception import SkipPluginException


async def auth_group(plugin: PluginInfo, entity: EntityIDs, message: UniMsg):
    """Group blacklist / master-switch / level check.

    Args:
        plugin: plugin metadata record
        entity: resolved entity ids
        message: incoming message (the wake-up keyword bypasses sleep mode)

    Raises:
        SkipPluginException: the group is missing, blacklisted, sleeping,
            or below the plugin's required level
    """
    start_time = time.time()

    if not entity.group_id:
        return

    try:
        text = message.extract_plain_text()

        # Fetch group record via the cached data-access layer.
        group_dao = DataAccess(GroupConsole)

        try:
            group: GroupConsole | None = await asyncio.wait_for(
                group_dao.safe_get_or_none(
                    group_id=entity.group_id, channel_id__isnull=True
                ),
                timeout=DB_TIMEOUT_SECONDS,
            )
        except asyncio.TimeoutError:
            logger.error("查询群组信息超时", LOGGER_COMMAND, session=entity.user_id)
            # Fail open on timeout.
            return

        if not group:
            raise SkipPluginException("群组信息不存在...")
        if group.level < 0:
            raise SkipPluginException("群组黑名单, 目标群组群权限权限-1...")
        # Sleeping group: only the wake-up keyword goes through.
        if text.strip() != SwitchEnum.ENABLE and not group.status:
            raise SkipPluginException("群组休眠状态...")
        if plugin.level > group.level:
            raise SkipPluginException(
                f"{plugin.name}({plugin.module}) 群等级限制,"
                f"该功能需要的群等级: {plugin.level}..."
            )
    finally:
        # Surface slow checks (> WARNING_THRESHOLD seconds).
        elapsed = time.time() - start_time
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"auth_group 耗时: {elapsed:.3f}s, plugin={plugin.module}",
                LOGGER_COMMAND,
                session=entity.user_id,
                group_id=entity.group_id,
            )


# --- zhenxun/builtin_plugins/hooks/auth/auth_limit.py (new module) ---
import asyncio  # noqa: F811
import time  # noqa: F811
from typing import ClassVar

import nonebot
from nonebot_plugin_uninfo import Uninfo
from pydantic import BaseModel

from zhenxun.models.plugin_info import PluginInfo  # noqa: F811
from zhenxun.models.plugin_limit import PluginLimit
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS  # noqa: F811
from zhenxun.services.log import logger  # noqa: F811
from zhenxun.utils.enum import LimitWatchType, PluginLimitType
from zhenxun.utils.manager.priority_manager import PriorityLifecycle
from zhenxun.utils.message import MessageUtils
from zhenxun.utils.utils import (
    CountLimiter,
    FreqLimiter,
    UserBlockLimiter,
    get_entity_ids,
)

from .config import LOGGER_COMMAND, WARNING_THRESHOLD  # noqa: F811
from .exception import SkipPluginException  # noqa: F811

driver = nonebot.get_driver()


@PriorityLifecycle.on_startup(priority=5)
async def _():
    """Load the limit table at startup."""
    await LimitManager.init_limit()


class Limit(BaseModel):
    # A limit record paired with its runtime limiter instance.
    limit: PluginLimit
    limiter: FreqLimiter | UserBlockLimiter | CountLimiter

    class Config:
        arbitrary_types_allowed = True


class LimitManager:
    """In-memory registry of plugin limits (cd / block / count)."""

    add_module: ClassVar[list] = []
    last_update_time: ClassVar[float] = 0
    # Refresh interval for the global limit table, in seconds.
    # NOTE(review): 6000s is 100 minutes, but the original comment claimed
    # "refresh hourly" — confirm whether 3600 was intended.
    update_interval: ClassVar[float] = 6000
    is_updating: ClassVar[bool] = False  # guards against concurrent refreshes

    cd_limit: ClassVar[dict[str, Limit]] = {}
    block_limit: ClassVar[dict[str, Limit]] = {}
    count_limit: ClassVar[dict[str, Limit]] = {}

    # Per-module limit cache to avoid hitting the database on every event.
    module_limit_cache: ClassVar[dict[str, tuple[float, list[PluginLimit]]]] = {}
    module_cache_ttl: ClassVar[float] = 60  # cache TTL in seconds

    @classmethod
    async def init_limit(cls):
        """Initial load of all active limits."""
        cls.last_update_time = time.time()
        try:
            await asyncio.wait_for(cls.update_limits(), timeout=DB_TIMEOUT_SECONDS * 2)
        except asyncio.TimeoutError:
            logger.error("初始化限制超时", LOGGER_COMMAND)

    @classmethod
    async def update_limits(cls):
        """Reload all active limits from the database."""
        if cls.is_updating:
            return

        cls.is_updating = True
        try:
            start_time = time.time()
            try:
                limit_list = await asyncio.wait_for(
                    PluginLimit.filter(status=True).all(), timeout=DB_TIMEOUT_SECONDS
                )
            except asyncio.TimeoutError:
                logger.error("查询限制信息超时", LOGGER_COMMAND)
                cls.is_updating = False
                return

            # Replace the registries wholesale.
            cls.add_module = []
            cls.cd_limit = {}
            cls.block_limit = {}
            cls.count_limit = {}
            for limit in limit_list:
                cls.add_limit(limit)

            cls.last_update_time = time.time()
            elapsed = time.time() - start_time
            if elapsed > WARNING_THRESHOLD:
                logger.warning(f"更新限制信息耗时: {elapsed:.3f}s", LOGGER_COMMAND)
        finally:
            cls.is_updating = False

    @classmethod
    def add_limit(cls, limit: PluginLimit):
        """Register one limit record with a matching limiter.

        Args:
            limit: the limit record
        """
        if limit.module not in cls.add_module:
            cls.add_module.append(limit.module)
        if limit.limit_type == PluginLimitType.BLOCK:
            cls.block_limit[limit.module] = Limit(
                limit=limit, limiter=UserBlockLimiter()
            )
        elif limit.limit_type == PluginLimitType.CD:
            cls.cd_limit[limit.module] = Limit(
                limit=limit, limiter=FreqLimiter(limit.cd)
            )
        elif limit.limit_type == PluginLimitType.COUNT:
            cls.count_limit[limit.module] = Limit(
                limit=limit, limiter=CountLimiter(limit.max_count)
            )

    @classmethod
    def unblock(
        cls, module: str, user_id: str, group_id: str | None, channel_id: str | None
    ):
        """Release the block-limit flag for the given entity.

        Args:
            module: module name
            user_id: user id
            group_id: group id
            channel_id: channel id
        """
        if limit_model := cls.block_limit.get(module):
            limit = limit_model.limit
            limiter: UserBlockLimiter = limit_model.limiter  # type: ignore
            key_type = user_id
            if group_id and limit.watch_type == LimitWatchType.GROUP:
                key_type = channel_id or group_id
            logger.debug(
                f"解除对象: {key_type} 的block限制",
                LOGGER_COMMAND,
                session=user_id,
                group_id=group_id,
            )
            limiter.set_false(key_type)

    @classmethod
    async def get_module_limits(cls, module: str) -> list[PluginLimit]:
        """Fetch one module's limits, with a short-lived cache.

        Args:
            module: module name

        Returns:
            list[PluginLimit]: active limits (empty on timeout)
        """
        current_time = time.time()

        if module in cls.module_limit_cache:
            cache_time, limits = cls.module_limit_cache[module]
            if current_time - cache_time < cls.module_cache_ttl:
                return limits

        try:
            start_time = time.time()
            limits = await asyncio.wait_for(
                PluginLimit.filter(module=module, status=True).all(),
                timeout=DB_TIMEOUT_SECONDS,
            )
            elapsed = time.time() - start_time
            if elapsed > WARNING_THRESHOLD:
                logger.warning(
                    f"查询模块限制信息耗时: {elapsed:.3f}s, 模块: {module}",
                    LOGGER_COMMAND,
                )

            cls.module_limit_cache[module] = (current_time, limits)
            return limits
        except asyncio.TimeoutError:
            logger.error(f"查询模块限制信息超时: {module}", LOGGER_COMMAND)
            # Fail open on timeout.
            return []

    @classmethod
    async def check(
        cls,
        module: str,
        user_id: str,
        group_id: str | None,
        channel_id: str | None,
    ):
        """Run all registered limit checks for one module.

        Args:
            module: module name
            user_id: user id
            group_id: group id
            channel_id: channel id

        Raises:
            SkipPluginException: a limit is currently active
        """
        start_time = time.time()

        # Kick off a background refresh when the table is stale.
        if (
            time.time() - cls.last_update_time > cls.update_interval
            and not cls.is_updating
        ):
            asyncio.create_task(cls.update_limits())  # noqa: RUF006

        # Lazily load limits for modules not seen at startup.
        if module not in cls.add_module:
            limits = await cls.get_module_limits(module)
            for limit in limits:
                cls.add_limit(limit)

        try:
            if limit_model := cls.cd_limit.get(module):
                await cls.__check(limit_model, user_id, group_id, channel_id)
            if limit_model := cls.block_limit.get(module):
                await cls.__check(limit_model, user_id, group_id, channel_id)
            if limit_model := cls.count_limit.get(module):
                await cls.__check(limit_model, user_id, group_id, channel_id)
        finally:
            elapsed = time.time() - start_time
            if elapsed > WARNING_THRESHOLD:
                logger.warning(
                    f"限制检查耗时: {elapsed:.3f}s, 模块: {module}",
                    LOGGER_COMMAND,
                    session=user_id,
                    group_id=group_id,
                )

    @classmethod
    async def __check(
        cls,
        limit_model: Limit | None,
        user_id: str,
        group_id: str | None,
        channel_id: str | None,
    ):
        """Evaluate one limit against the calling entity.

        Args:
            limit_model: the limit + limiter pair
            user_id: user id
            group_id: group id
            channel_id: channel id

        Raises:
            SkipPluginException: the limiter rejects this invocation
        """
        if not limit_model:
            return
        limit = limit_model.limit
        limiter = limit_model.limiter

        # FIX: the scope test previously read `LimitWatchType.ALL or ...`,
        # which is always truthy (an enum member), so every limit was
        # applied regardless of its watch_type. Compare against the record.
        is_limit = (
            limit.watch_type == LimitWatchType.ALL
            or (group_id and limit.watch_type == LimitWatchType.GROUP)
            or (not group_id and limit.watch_type == LimitWatchType.USER)
        )
        if not is_limit:
            # The limit's watch scope does not apply to this entity; do not
            # consume cooldowns/counters for out-of-scope events.
            return

        key_type = user_id
        if group_id and limit.watch_type == LimitWatchType.GROUP:
            key_type = channel_id or group_id

        if not limiter.check(key_type):
            if limit.result:
                try:
                    await asyncio.wait_for(
                        MessageUtils.build_message(limit.result).send(),
                        timeout=DB_TIMEOUT_SECONDS,
                    )
                except asyncio.TimeoutError:
                    logger.error(f"发送限制消息超时: {limit.module}", LOGGER_COMMAND)
            raise SkipPluginException(
                f"{limit.module}({limit.limit_type}) 正在限制中..."
            )

        logger.debug(
            f"开始进行限制 {limit.module}({limit.limit_type})...",
            LOGGER_COMMAND,
            session=user_id,
            group_id=group_id,
        )
        if isinstance(limiter, FreqLimiter):
            limiter.start_cd(key_type)
        if isinstance(limiter, UserBlockLimiter):
            limiter.set_true(key_type)
        if isinstance(limiter, CountLimiter):
            limiter.increase(key_type)


async def auth_limit(plugin: PluginInfo, session: Uninfo):
    """Plugin limit entry point.

    Args:
        plugin: plugin metadata record
        session: uninfo session
    """
    entity = get_entity_ids(session)
    try:
        await asyncio.wait_for(
            LimitManager.check(
                plugin.module, entity.user_id, entity.group_id, entity.channel_id
            ),
            timeout=DB_TIMEOUT_SECONDS * 2,  # allow a little extra headroom
        )
    except asyncio.TimeoutError:
        logger.error(f"检查插件限制超时: {plugin.module}", LOGGER_COMMAND)
        # Fail open on timeout.
# --- zhenxun/builtin_plugins/hooks/auth/auth_plugin.py (new module) ---
import asyncio
import time

from nonebot.adapters import Event
from nonebot_plugin_uninfo import Uninfo

from zhenxun.models.group_console import GroupConsole
from zhenxun.models.plugin_info import PluginInfo
from zhenxun.services.data_access import DataAccess
from zhenxun.services.db_context import DB_TIMEOUT_SECONDS
from zhenxun.services.log import logger
from zhenxun.utils.common_utils import CommonUtils
from zhenxun.utils.enum import BlockType
from zhenxun.utils.utils import get_entity_ids

from .config import LOGGER_COMMAND, WARNING_THRESHOLD
from .exception import IsSuperuserException, SkipPluginException
from .utils import freq, is_poke, send_message


class GroupCheck:
    """Group-scoped plugin switch checks."""

    def __init__(
        self, plugin: PluginInfo, group_id: str, session: Uninfo, is_poke: bool
    ) -> None:
        self.group_id = group_id
        self.session = session
        self.is_poke = is_poke
        self.plugin = plugin
        self.group_dao = DataAccess(GroupConsole)
        self.group_data = None

    async def _notify(self, text: str) -> None:
        # Send the tip message with a timeout guard.
        try:
            await asyncio.wait_for(
                send_message(self.session, text, self.group_id),
                timeout=DB_TIMEOUT_SECONDS,
            )
        except asyncio.TimeoutError:
            logger.error(f"发送消息超时: {self.group_id}", LOGGER_COMMAND)

    async def check(self):
        """Raise ``SkipPluginException`` when the plugin is off in this group."""
        started = time.time()
        try:
            # One cached query for the group record.
            try:
                self.group_data = await asyncio.wait_for(
                    self.group_dao.safe_get_or_none(
                        group_id=self.group_id, channel_id__isnull=True
                    ),
                    timeout=DB_TIMEOUT_SECONDS,
                )
            except asyncio.TimeoutError:
                logger.error(f"查询群组数据超时: {self.group_id}", LOGGER_COMMAND)
                return  # fail open on timeout

            module_key = CommonUtils.format(self.plugin.module)

            # Disabled by a superuser for this group.
            if (
                self.group_data
                and module_key in self.group_data.superuser_block_plugin
            ):
                if freq.is_send_limit_message(self.plugin, self.group_id, self.is_poke):
                    await self._notify("超级管理员禁用了该群此功能...")
                raise SkipPluginException(
                    f"{self.plugin.name}({self.plugin.module})"
                    f" 超级管理员禁用了该群此功能..."
                )

            # Disabled by the group itself.
            if self.group_data and module_key in self.group_data.block_plugin:
                if freq.is_send_limit_message(self.plugin, self.group_id, self.is_poke):
                    await self._notify("该群未开启此功能...")
                raise SkipPluginException(
                    f"{self.plugin.name}({self.plugin.module}) 未开启此功能..."
                )

            # Globally disabled for group chats.
            if self.plugin.block_type == BlockType.GROUP:
                if freq.is_send_limit_message(self.plugin, self.group_id, self.is_poke):
                    await self._notify("该功能在群组中已被禁用...")
                raise SkipPluginException(
                    f"{self.plugin.name}({self.plugin.module})该插件在群组中已被禁用..."
                )
        finally:
            elapsed = time.time() - started
            if elapsed > WARNING_THRESHOLD:
                logger.warning(
                    f"GroupCheck.check 耗时: {elapsed:.3f}s, 群组: {self.group_id}",
                    LOGGER_COMMAND,
                )


class PluginCheck:
    """Private-chat and global plugin switch checks."""

    def __init__(self, group_id: str | None, session: Uninfo, is_poke: bool):
        self.session = session
        self.is_poke = is_poke
        self.group_id = group_id
        self.group_dao = DataAccess(GroupConsole)
        self.group_data = None

    async def check_user(self, plugin: PluginInfo):
        """Raise when the plugin is disabled for private chats.

        Args:
            plugin: plugin metadata record

        Raises:
            SkipPluginException: disabled in private chat
        """
        if plugin.block_type != BlockType.PRIVATE:
            return
        if freq.is_send_limit_message(plugin, self.session.user.id, self.is_poke):
            try:
                await asyncio.wait_for(
                    send_message(self.session, "该功能在私聊中已被禁用..."),
                    timeout=DB_TIMEOUT_SECONDS,
                )
            except asyncio.TimeoutError:
                logger.error("发送消息超时", LOGGER_COMMAND)
        raise SkipPluginException(
            f"{plugin.name}({plugin.module}) 该插件在私聊中已被禁用..."
        )

    async def check_global(self, plugin: PluginInfo):
        """Raise when the plugin is globally switched off.

        Args:
            plugin: plugin metadata record

        Raises:
            IsSuperuserException: the group is a super-group (bypasses)
            SkipPluginException: globally disabled
        """
        started = time.time()
        try:
            if plugin.status or plugin.block_type != BlockType.ALL:
                return
            if self.group_id:
                # One cached query for the group record.
                try:
                    self.group_data = await asyncio.wait_for(
                        self.group_dao.safe_get_or_none(
                            group_id=self.group_id, channel_id__isnull=True
                        ),
                        timeout=DB_TIMEOUT_SECONDS,
                    )
                except asyncio.TimeoutError:
                    logger.error(f"查询群组数据超时: {self.group_id}", LOGGER_COMMAND)
                    return  # fail open on timeout

                if self.group_data and self.group_data.is_super:
                    raise IsSuperuserException()

            sid = self.group_id or self.session.user.id
            if freq.is_send_limit_message(plugin, sid, self.is_poke):
                try:
                    await asyncio.wait_for(
                        send_message(self.session, "全局未开启此功能...", sid),
                        timeout=DB_TIMEOUT_SECONDS,
                    )
                except asyncio.TimeoutError:
                    logger.error(f"发送消息超时: {sid}", LOGGER_COMMAND)
            raise SkipPluginException(
                f"{plugin.name}({plugin.module}) 全局未开启此功能..."
            )
        finally:
            elapsed = time.time() - started
            if elapsed > WARNING_THRESHOLD:
                logger.warning(
                    f"PluginCheck.check_global 耗时: {elapsed:.3f}s", LOGGER_COMMAND
                )


async def auth_plugin(plugin: PluginInfo, session: Uninfo, event: Event):
    """Plugin on/off state entry point.

    Args:
        plugin: plugin metadata record
        session: uninfo session
        event: the triggering event (poke events suppress tips)
    """
    started = time.time()
    try:
        entity = get_entity_ids(session)
        poke = is_poke(event)
        plugin_check = PluginCheck(entity.group_id, session, poke)

        if entity.group_id:
            try:
                await asyncio.wait_for(
                    GroupCheck(plugin, entity.group_id, session, poke).check(),
                    timeout=DB_TIMEOUT_SECONDS * 2,
                )
            except asyncio.TimeoutError:
                logger.error(f"群组检查超时: {entity.group_id}", LOGGER_COMMAND)
                # fail open on timeout
        else:
            try:
                await asyncio.wait_for(
                    plugin_check.check_user(plugin), timeout=DB_TIMEOUT_SECONDS
                )
            except asyncio.TimeoutError:
                logger.error("用户检查超时", LOGGER_COMMAND)
                # fail open on timeout

        try:
            await asyncio.wait_for(
                plugin_check.check_global(plugin), timeout=DB_TIMEOUT_SECONDS
            )
        except asyncio.TimeoutError:
            logger.error("全局检查超时", LOGGER_COMMAND)
            # fail open on timeout
    finally:
        elapsed = time.time() - started
        if elapsed > WARNING_THRESHOLD:
            logger.warning(
                f"auth_plugin 总耗时: {elapsed:.3f}s, 模块: {plugin.module}",
                LOGGER_COMMAND,
            )


# --- zhenxun/builtin_plugins/hooks/auth/bot_filter.py (module header) ---
import nonebot

from zhenxun.configs.config import Config

from .exception import SkipPluginException  # noqa: F811

Config.add_plugin_config(
    "hook",
    "FILTER_BOT",
    True,
    help="过滤当前连接bot(防止bot互相调用)",
    default_value=True,
    type=bool,
)
bot_filter(session: Uninfo): + """过滤bot调用bot + + 参数: + session: Uninfo + + 异常: + SkipPluginException: bot互相调用 + """ + if not Config.get_config("hook", "FILTER_BOT"): + return + bot_ids = list(nonebot.get_bots().keys()) + if session.user.id == session.self_id: + return + if session.user.id in bot_ids: + raise SkipPluginException( + f"bot:{session.self_id} 尝试调用 bot:{session.user.id}" + ) diff --git a/zhenxun/builtin_plugins/hooks/auth/config.py b/zhenxun/builtin_plugins/hooks/auth/config.py new file mode 100644 index 00000000..9394b585 --- /dev/null +++ b/zhenxun/builtin_plugins/hooks/auth/config.py @@ -0,0 +1,16 @@ +import sys + +if sys.version_info >= (3, 11): + from enum import StrEnum +else: + from strenum import StrEnum + +LOGGER_COMMAND = "AuthChecker" + + +class SwitchEnum(StrEnum): + ENABLE = "醒来" + DISABLE = "休息吧" + + +WARNING_THRESHOLD = 0.5 # 警告阈值(秒) diff --git a/zhenxun/builtin_plugins/hooks/auth/exception.py b/zhenxun/builtin_plugins/hooks/auth/exception.py new file mode 100644 index 00000000..392a6718 --- /dev/null +++ b/zhenxun/builtin_plugins/hooks/auth/exception.py @@ -0,0 +1,26 @@ +class IsSuperuserException(Exception): + pass + + +class SkipPluginException(Exception): + def __init__(self, info: str, *args: object) -> None: + super().__init__(*args) + self.info = info + + def __str__(self) -> str: + return self.info + + def __repr__(self) -> str: + return self.info + + +class PermissionExemption(Exception): + def __init__(self, info: str, *args: object) -> None: + super().__init__(*args) + self.info = info + + def __str__(self) -> str: + return self.info + + def __repr__(self) -> str: + return self.info diff --git a/zhenxun/builtin_plugins/hooks/auth/utils.py b/zhenxun/builtin_plugins/hooks/auth/utils.py new file mode 100644 index 00000000..0f925590 --- /dev/null +++ b/zhenxun/builtin_plugins/hooks/auth/utils.py @@ -0,0 +1,91 @@ +import contextlib + +from nonebot.adapters import Event +from nonebot_plugin_uninfo import Uninfo + +from 
zhenxun.configs.config import Config +from zhenxun.models.plugin_info import PluginInfo +from zhenxun.services.log import logger +from zhenxun.utils.enum import PluginType +from zhenxun.utils.message import MessageUtils +from zhenxun.utils.utils import FreqLimiter + +from .config import LOGGER_COMMAND + +base_config = Config.get("hook") + + +def is_poke(event: Event) -> bool: + """判断是否为poke类型 + + 参数: + event: Event + + 返回: + bool: 是否为poke类型 + """ + with contextlib.suppress(ImportError): + from nonebot.adapters.onebot.v11 import PokeNotifyEvent + + return isinstance(event, PokeNotifyEvent) + return False + + +async def send_message( + session: Uninfo, message: list | str, check_tag: str | None = None +): + """发送消息 + + 参数: + session: Uninfo + message: 消息 + check_tag: cd flag + """ + try: + if not check_tag: + await MessageUtils.build_message(message).send(reply_to=True) + elif freq._flmt.check(check_tag): + freq._flmt.start_cd(check_tag) + await MessageUtils.build_message(message).send(reply_to=True) + except Exception as e: + logger.error( + "发送消息失败", + LOGGER_COMMAND, + session=session, + e=e, + ) + + +class FreqUtils: + def __init__(self): + check_notice_info_cd = Config.get_config("hook", "CHECK_NOTICE_INFO_CD") + if check_notice_info_cd is None or check_notice_info_cd < 0: + raise ValueError("模块: [hook], 配置项: [CHECK_NOTICE_INFO_CD] 为空或小于0") + self._flmt = FreqLimiter(check_notice_info_cd) + self._flmt_g = FreqLimiter(check_notice_info_cd) + self._flmt_s = FreqLimiter(check_notice_info_cd) + self._flmt_c = FreqLimiter(check_notice_info_cd) + + def is_send_limit_message( + self, plugin: PluginInfo, sid: str, is_poke: bool + ) -> bool: + """是否发送提示消息 + + 参数: + plugin: PluginInfo + sid: 检测键 + is_poke: 是否是戳一戳 + + 返回: + bool: 是否发送提示消息 + """ + if is_poke: + return False + if not base_config.get("IS_SEND_TIP_MESSAGE"): + return False + if plugin.plugin_type == PluginType.DEPENDANT: + return False + return plugin.module != "ai" if self._flmt_s.check(sid) else False + + 
+freq = FreqUtils() diff --git a/zhenxun/builtin_plugins/hooks/auth_checker.py b/zhenxun/builtin_plugins/hooks/auth_checker.py new file mode 100644 index 00000000..760b02f4 --- /dev/null +++ b/zhenxun/builtin_plugins/hooks/auth_checker.py @@ -0,0 +1,375 @@ +import asyncio +import time + +from nonebot.adapters import Bot, Event +from nonebot.exception import IgnoredException +from nonebot.matcher import Matcher +from nonebot_plugin_alconna import UniMsg +from nonebot_plugin_uninfo import Uninfo +from tortoise.exceptions import IntegrityError + +from zhenxun.models.plugin_info import PluginInfo +from zhenxun.models.user_console import UserConsole +from zhenxun.services.data_access import DataAccess +from zhenxun.services.log import logger +from zhenxun.utils.enum import GoldHandle, PluginType +from zhenxun.utils.exception import InsufficientGold +from zhenxun.utils.platform import PlatformUtils +from zhenxun.utils.utils import get_entity_ids + +from .auth.auth_admin import auth_admin +from .auth.auth_ban import auth_ban +from .auth.auth_bot import auth_bot +from .auth.auth_cost import auth_cost +from .auth.auth_group import auth_group +from .auth.auth_limit import LimitManager, auth_limit +from .auth.auth_plugin import auth_plugin +from .auth.bot_filter import bot_filter +from .auth.config import LOGGER_COMMAND, WARNING_THRESHOLD +from .auth.exception import ( + IsSuperuserException, + PermissionExemption, + SkipPluginException, +) + +# 超时设置(秒) +TIMEOUT_SECONDS = 5.0 +# 熔断计数器 +CIRCUIT_BREAKERS = { + "auth_ban": {"failures": 0, "threshold": 3, "active": False, "reset_time": 0}, + "auth_bot": {"failures": 0, "threshold": 3, "active": False, "reset_time": 0}, + "auth_group": {"failures": 0, "threshold": 3, "active": False, "reset_time": 0}, + "auth_admin": {"failures": 0, "threshold": 3, "active": False, "reset_time": 0}, + "auth_plugin": {"failures": 0, "threshold": 3, "active": False, "reset_time": 0}, + "auth_limit": {"failures": 0, "threshold": 3, "active": False, 
"reset_time": 0}, +} +# 熔断重置时间(秒) +CIRCUIT_RESET_TIME = 300 # 5分钟 + + +# 超时装饰器 +async def with_timeout(coro, timeout=TIMEOUT_SECONDS, name=None): + """带超时控制的协程执行 + + 参数: + coro: 要执行的协程 + timeout: 超时时间(秒) + name: 操作名称,用于日志记录 + + 返回: + 协程的返回值,或者在超时时抛出 TimeoutError + """ + try: + return await asyncio.wait_for(coro, timeout=timeout) + except asyncio.TimeoutError: + if name: + logger.error(f"{name} 操作超时 (>{timeout}s)", LOGGER_COMMAND) + # 更新熔断计数器 + if name in CIRCUIT_BREAKERS: + CIRCUIT_BREAKERS[name]["failures"] += 1 + if ( + CIRCUIT_BREAKERS[name]["failures"] + >= CIRCUIT_BREAKERS[name]["threshold"] + and not CIRCUIT_BREAKERS[name]["active"] + ): + CIRCUIT_BREAKERS[name]["active"] = True + CIRCUIT_BREAKERS[name]["reset_time"] = ( + time.time() + CIRCUIT_RESET_TIME + ) + logger.warning( + f"{name} 熔断器已激活,将在 {CIRCUIT_RESET_TIME} 秒后重置", + LOGGER_COMMAND, + ) + raise + + +# 检查熔断状态 +def check_circuit_breaker(name): + """检查熔断器状态 + + 参数: + name: 操作名称 + + 返回: + bool: 是否已熔断 + """ + if name not in CIRCUIT_BREAKERS: + return False + + # 检查是否需要重置熔断器 + if ( + CIRCUIT_BREAKERS[name]["active"] + and time.time() > CIRCUIT_BREAKERS[name]["reset_time"] + ): + CIRCUIT_BREAKERS[name]["active"] = False + CIRCUIT_BREAKERS[name]["failures"] = 0 + logger.info(f"{name} 熔断器已重置", LOGGER_COMMAND) + + return CIRCUIT_BREAKERS[name]["active"] + + +async def get_plugin_and_user( + module: str, user_id: str +) -> tuple[PluginInfo, UserConsole]: + """获取用户数据和插件信息 + + 参数: + module: 模块名 + user_id: 用户id + + 异常: + PermissionExemption: 插件数据不存在 + PermissionExemption: 插件类型为HIDDEN + PermissionExemption: 重复创建用户 + PermissionExemption: 用户数据不存在 + + 返回: + tuple[PluginInfo, UserConsole]: 插件信息,用户信息 + """ + user_dao = DataAccess(UserConsole) + plugin_dao = DataAccess(PluginInfo) + + # 并行查询插件和用户数据 + plugin_task = plugin_dao.safe_get_or_none(module=module) + user_task = user_dao.safe_get_or_none(user_id=user_id) + + try: + plugin, user = await with_timeout( + asyncio.gather(plugin_task, user_task), 
name="get_plugin_and_user" + ) + except asyncio.TimeoutError: + # 如果并行查询超时,尝试串行查询 + logger.warning("并行查询超时,尝试串行查询", LOGGER_COMMAND) + plugin = await with_timeout( + plugin_dao.safe_get_or_none(module=module), name="get_plugin" + ) + user = await with_timeout( + user_dao.safe_get_or_none(user_id=user_id), name="get_user" + ) + + if not plugin: + raise PermissionExemption(f"插件:{module} 数据不存在,已跳过权限检查...") + if plugin.plugin_type == PluginType.HIDDEN: + raise PermissionExemption( + f"插件: {plugin.name}:{plugin.module} 为HIDDEN,已跳过权限检查..." + ) + user = None + try: + user = await user_dao.safe_get_or_none(user_id=user_id) + except IntegrityError as e: + raise PermissionExemption("重复创建用户,已跳过该次权限检查...") from e + if not user: + raise PermissionExemption("用户数据不存在,已跳过权限检查...") + return plugin, user + + +async def get_plugin_cost( + bot: Bot, user: UserConsole, plugin: PluginInfo, session: Uninfo +) -> int: + """获取插件费用 + + 参数: + bot: Bot + user: 用户数据 + plugin: 插件数据 + session: Uninfo + + 异常: + IsSuperuserException: 超级用户 + IsSuperuserException: 超级用户 + + 返回: + int: 调用插件金币费用 + """ + cost_gold = await with_timeout(auth_cost(user, plugin, session), name="auth_cost") + if session.user.id in bot.config.superusers: + if plugin.plugin_type == PluginType.SUPERUSER: + raise IsSuperuserException() + if not plugin.limit_superuser: + raise IsSuperuserException() + return cost_gold + + +async def reduce_gold(user_id: str, module: str, cost_gold: int, session: Uninfo): + """扣除用户金币 + + 参数: + user_id: 用户id + module: 插件模块名称 + cost_gold: 消耗金币 + session: Uninfo + """ + user_dao = DataAccess(UserConsole) + try: + await with_timeout( + UserConsole.reduce_gold( + user_id, + cost_gold, + GoldHandle.PLUGIN, + module, + PlatformUtils.get_platform(session), + ), + name="reduce_gold", + ) + except InsufficientGold: + if u := await UserConsole.get_user(user_id): + u.gold = 0 + await u.save(update_fields=["gold"]) + except asyncio.TimeoutError: + logger.error( + f"扣除金币超时,用户: {user_id}, 金币: {cost_gold}", + 
LOGGER_COMMAND, + session=session, + ) + + # 清除缓存,使下次查询时从数据库获取最新数据 + await user_dao.clear_cache(user_id=user_id) + logger.debug(f"调用功能花费金币: {cost_gold}", LOGGER_COMMAND, session=session) + + +# 辅助函数,用于记录每个 hook 的执行时间 +async def time_hook(coro, name, time_dict): + start = time.time() + try: + # 检查熔断状态 + if check_circuit_breaker(name): + logger.info(f"{name} 熔断器激活中,跳过执行", LOGGER_COMMAND) + time_dict[name] = "熔断跳过" + return + + # 添加超时控制 + return await with_timeout(coro, name=name) + except asyncio.TimeoutError: + time_dict[name] = f"超时 (>{TIMEOUT_SECONDS}s)" + finally: + if name not in time_dict: + time_dict[name] = f"{time.time() - start:.3f}s" + + +async def auth( + matcher: Matcher, + event: Event, + bot: Bot, + session: Uninfo, + message: UniMsg, +): + """权限检查 + + 参数: + matcher: matcher + event: Event + bot: bot + session: Uninfo + message: UniMsg + """ + start_time = time.time() + cost_gold = 0 + ignore_flag = False + entity = get_entity_ids(session) + module = matcher.plugin_name or "" + + # 用于记录各个 hook 的执行时间 + hook_times = {} + hooks_time = 0 # 初始化 hooks_time 变量 + + try: + if not module: + raise PermissionExemption("Matcher插件名称不存在...") + + # 获取插件和用户数据 + plugin_user_start = time.time() + try: + plugin, user = await with_timeout( + get_plugin_and_user(module, entity.user_id), name="get_plugin_and_user" + ) + hook_times["get_plugin_user"] = f"{time.time() - plugin_user_start:.3f}s" + except asyncio.TimeoutError: + logger.error( + f"获取插件和用户数据超时,模块: {module}", + LOGGER_COMMAND, + session=session, + ) + raise PermissionExemption("获取插件和用户数据超时,请稍后再试...") + + # 获取插件费用 + cost_start = time.time() + try: + cost_gold = await with_timeout( + get_plugin_cost(bot, user, plugin, session), name="get_plugin_cost" + ) + hook_times["cost_gold"] = f"{time.time() - cost_start:.3f}s" + except asyncio.TimeoutError: + logger.error( + f"获取插件费用超时,模块: {module}", LOGGER_COMMAND, session=session + ) + # 继续执行,不阻止权限检查 + + # 执行 bot_filter + bot_filter(session) + + # 并行执行所有 hook 检查,并记录执行时间 + 
hooks_start = time.time() + + # 创建所有 hook 任务 + hook_tasks = [ + time_hook(auth_ban(matcher, bot, session), "auth_ban", hook_times), + time_hook(auth_bot(plugin, bot.self_id), "auth_bot", hook_times), + time_hook(auth_group(plugin, entity, message), "auth_group", hook_times), + time_hook(auth_admin(plugin, session), "auth_admin", hook_times), + time_hook(auth_plugin(plugin, session, event), "auth_plugin", hook_times), + time_hook(auth_limit(plugin, session), "auth_limit", hook_times), + ] + + # 使用 gather 并行执行所有 hook,但添加总体超时控制 + try: + await with_timeout( + asyncio.gather(*hook_tasks), + timeout=TIMEOUT_SECONDS * 2, # 给总体执行更多时间 + name="auth_hooks_gather", + ) + except asyncio.TimeoutError: + logger.error( + f"权限检查 hooks 总体执行超时,模块: {module}", + LOGGER_COMMAND, + session=session, + ) + # 不抛出异常,允许继续执行 + + hooks_time = time.time() - hooks_start + + except SkipPluginException as e: + LimitManager.unblock(module, entity.user_id, entity.group_id, entity.channel_id) + logger.info(str(e), LOGGER_COMMAND, session=session) + ignore_flag = True + except IsSuperuserException: + logger.debug("超级用户跳过权限检测...", LOGGER_COMMAND, session=session) + except PermissionExemption as e: + logger.info(str(e), LOGGER_COMMAND, session=session) + + # 扣除金币 + if not ignore_flag and cost_gold > 0: + gold_start = time.time() + try: + await with_timeout( + reduce_gold(entity.user_id, module, cost_gold, session), + name="reduce_gold", + ) + hook_times["reduce_gold"] = f"{time.time() - gold_start:.3f}s" + except asyncio.TimeoutError: + logger.error( + f"扣除金币超时,模块: {module}", LOGGER_COMMAND, session=session + ) + + # 记录总执行时间 + total_time = time.time() - start_time + if total_time > WARNING_THRESHOLD: # 如果总时间超过500ms,记录详细信息 + logger.warning( + f"权限检查耗时过长: {total_time:.3f}s, 模块: {module}, " + f"hooks时间: {hooks_time:.3f}s, " + f"详情: {hook_times}", + LOGGER_COMMAND, + session=session, + ) + + if ignore_flag: + raise IgnoredException("权限检测 ignore") diff --git a/zhenxun/builtin_plugins/hooks/auth_hook.py 
b/zhenxun/builtin_plugins/hooks/auth_hook.py index 0ccca75c..34ea8018 100644 --- a/zhenxun/builtin_plugins/hooks/auth_hook.py +++ b/zhenxun/builtin_plugins/hooks/auth_hook.py @@ -1,41 +1,43 @@ -from nonebot.adapters.onebot.v11 import Bot, Event +import time + +from nonebot.adapters import Bot, Event from nonebot.matcher import Matcher from nonebot.message import run_postprocessor, run_preprocessor from nonebot_plugin_alconna import UniMsg -from nonebot_plugin_session import EventSession +from nonebot_plugin_uninfo import Uninfo -from ._auth_checker import LimitManage, checker +from zhenxun.services.log import logger + +from .auth.config import LOGGER_COMMAND +from .auth_checker import LimitManager, auth # # 权限检测 @run_preprocessor -async def _( - matcher: Matcher, event: Event, bot: Bot, session: EventSession, message: UniMsg -): - await checker.auth( +async def _(matcher: Matcher, event: Event, bot: Bot, session: Uninfo, message: UniMsg): + start_time = time.time() + await auth( matcher, event, bot, session, message, ) + logger.debug(f"权限检测耗时:{time.time() - start_time}秒", LOGGER_COMMAND) # 解除命令block阻塞 @run_postprocessor -async def _( - matcher: Matcher, - exception: Exception | None, - bot: Bot, - event: Event, - session: EventSession, -): - user_id = session.id1 - group_id = session.id3 - channel_id = session.id2 - if not group_id: - group_id = channel_id - channel_id = None +async def _(matcher: Matcher, session: Uninfo): + user_id = session.user.id + group_id = None + channel_id = None + if session.group: + if session.group.parent: + group_id = session.group.parent.id + channel_id = session.group.id + else: + group_id = session.group.id if user_id and matcher.plugin: module = matcher.plugin.name - LimitManage.unblock(module, user_id, group_id, channel_id) + LimitManager.unblock(module, user_id, group_id, channel_id) diff --git a/zhenxun/builtin_plugins/hooks/ban_hook.py b/zhenxun/builtin_plugins/hooks/ban_hook.py deleted file mode 100644 index 66dfb11c..00000000 
--- a/zhenxun/builtin_plugins/hooks/ban_hook.py +++ /dev/null @@ -1,84 +0,0 @@ -from nonebot.adapters import Bot, Event -from nonebot.exception import IgnoredException -from nonebot.matcher import Matcher -from nonebot.message import run_preprocessor -from nonebot.typing import T_State -from nonebot_plugin_alconna import At -from nonebot_plugin_session import EventSession - -from zhenxun.configs.config import Config -from zhenxun.models.ban_console import BanConsole -from zhenxun.models.group_console import GroupConsole -from zhenxun.services.log import logger -from zhenxun.utils.enum import PluginType -from zhenxun.utils.message import MessageUtils -from zhenxun.utils.utils import FreqLimiter - -Config.add_plugin_config( - "hook", - "BAN_RESULT", - "才不会给你发消息.", - help="对被ban用户发送的消息", -) - -_flmt = FreqLimiter(300) - - -# 检查是否被ban -@run_preprocessor -async def _( - matcher: Matcher, bot: Bot, event: Event, state: T_State, session: EventSession -): - extra = {} - if plugin := matcher.plugin: - if metadata := plugin.metadata: - extra = metadata.extra - if extra.get("plugin_type") in [PluginType.HIDDEN]: - return - user_id = session.id1 - group_id = session.id3 or session.id2 - if group_id: - if user_id in bot.config.superusers: - return - if await BanConsole.is_ban(None, group_id): - logger.debug("群组处于黑名单中...", "ban_hook") - raise IgnoredException("群组处于黑名单中...") - if g := await GroupConsole.get_group(group_id): - if g.level < 0: - logger.debug("群黑名单, 群权限-1...", "ban_hook") - raise IgnoredException("群黑名单, 群权限-1..") - if user_id: - ban_result = Config.get_config("hook", "BAN_RESULT") - if user_id in bot.config.superusers: - return - if await BanConsole.is_ban(user_id, group_id): - time = await BanConsole.check_ban_time(user_id, group_id) - if time == -1: - time_str = "∞" - else: - time = abs(int(time)) - if time < 60: - time_str = f"{time!s} 秒" - else: - minute = int(time / 60) - if minute > 60: - hours = minute // 60 - minute %= 60 - time_str = f"{hours} 小时 
{minute}分钟" - else: - time_str = f"{minute} 分钟" - if ( - not extra.get("ignore_prompt") - and time != -1 - and ban_result - and _flmt.check(user_id) - ): - _flmt.start_cd(user_id) - await MessageUtils.build_message( - [ - At(flag="user", target=user_id), - f"{ban_result}\n在..在 {time_str} 后才会理你喔", - ] - ).send() - logger.debug("用户处于黑名单中...", "ban_hook") - raise IgnoredException("用户处于黑名单中...") diff --git a/zhenxun/builtin_plugins/hooks/call_hook.py b/zhenxun/builtin_plugins/hooks/call_hook.py index 1893754d..1695a48e 100644 --- a/zhenxun/builtin_plugins/hooks/call_hook.py +++ b/zhenxun/builtin_plugins/hooks/call_hook.py @@ -9,6 +9,8 @@ from zhenxun.utils.enum import BotSentType from zhenxun.utils.manager.message_manager import MessageManager from zhenxun.utils.platform import PlatformUtils +LOG_COMMAND = "MessageHook" + def replace_message(message: Message) -> str: """将消息中的at、image、record、face替换为字符串 @@ -54,11 +56,11 @@ async def handle_api_result( if user_id and message_id: MessageManager.add(str(user_id), str(message_id)) logger.debug( - f"收集消息id,user_id: {user_id}, msg_id: {message_id}", "msg_hook" + f"收集消息id,user_id: {user_id}, msg_id: {message_id}", LOG_COMMAND ) except Exception as e: logger.warning( - f"收集消息id发生错误...data: {data}, result: {result}", "msg_hook", e=e + f"收集消息id发生错误...data: {data}, result: {result}", LOG_COMMAND, e=e ) if not Config.get_config("hook", "RECORD_BOT_SENT_MESSAGES"): return @@ -80,6 +82,6 @@ async def handle_api_result( except Exception as e: logger.warning( f"消息发送记录发生错误...data: {data}, result: {result}", - "msg_hook", + LOG_COMMAND, e=e, ) diff --git a/zhenxun/builtin_plugins/init/__init__.py b/zhenxun/builtin_plugins/init/__init__.py index 3d97a47c..1bc259fc 100644 --- a/zhenxun/builtin_plugins/init/__init__.py +++ b/zhenxun/builtin_plugins/init/__init__.py @@ -4,15 +4,27 @@ import nonebot from nonebot.adapters import Bot from zhenxun.models.group_console import GroupConsole +from zhenxun.services.cache import CacheException from 
zhenxun.services.log import logger +from zhenxun.utils.manager.priority_manager import PriorityLifecycle from zhenxun.utils.platform import PlatformUtils nonebot.load_plugins(str(Path(__file__).parent.resolve())) +try: + from .__init_cache import register_cache_types +except CacheException as e: + raise SystemError(f"ERROR:{e}") driver = nonebot.get_driver() +@PriorityLifecycle.on_startup(priority=5) +async def _(): + register_cache_types() + logger.info("缓存类型注册完成") + + @driver.on_bot_connect async def _(bot: Bot): """将bot已存在的群组添加群认证 diff --git a/zhenxun/builtin_plugins/init/__init_cache.py b/zhenxun/builtin_plugins/init/__init_cache.py new file mode 100644 index 00000000..29653e12 --- /dev/null +++ b/zhenxun/builtin_plugins/init/__init_cache.py @@ -0,0 +1,35 @@ +""" +缓存初始化模块 + +负责注册各种缓存类型,实现按需缓存机制 +""" + +from zhenxun.models.ban_console import BanConsole +from zhenxun.models.bot_console import BotConsole +from zhenxun.models.group_console import GroupConsole +from zhenxun.models.level_user import LevelUser +from zhenxun.models.plugin_info import PluginInfo +from zhenxun.models.user_console import UserConsole +from zhenxun.services.cache import CacheRegistry, cache_config +from zhenxun.services.cache.config import CacheMode +from zhenxun.services.log import logger +from zhenxun.utils.enum import CacheType + + +# 注册缓存类型 +def register_cache_types(): + """注册所有缓存类型""" + CacheRegistry.register(CacheType.PLUGINS, PluginInfo) + CacheRegistry.register(CacheType.GROUPS, GroupConsole) + CacheRegistry.register(CacheType.BOT, BotConsole) + CacheRegistry.register(CacheType.USERS, UserConsole) + CacheRegistry.register( + CacheType.LEVEL, LevelUser, key_format="{user_id}_{group_id}" + ) + CacheRegistry.register(CacheType.BAN, BanConsole, key_format="{user_id}_{group_id}") + + if cache_config.cache_mode == CacheMode.NONE: + logger.info("缓存功能已禁用,将直接从数据库获取数据") + else: + logger.info(f"已注册所有缓存类型,缓存模式: {cache_config.cache_mode}") + logger.info("使用增量缓存模式,数据将按需加载到缓存中") diff --git 
a/zhenxun/builtin_plugins/init/init_plugin.py b/zhenxun/builtin_plugins/init/init_plugin.py index 5bf50409..95433909 100644 --- a/zhenxun/builtin_plugins/init/init_plugin.py +++ b/zhenxun/builtin_plugins/init/init_plugin.py @@ -1,3 +1,5 @@ +import asyncio + import aiofiles import nonebot from nonebot import get_loaded_plugins @@ -112,24 +114,29 @@ async def _(): await _handle_setting(plugin, plugin_list, limit_list) create_list = [] update_list = [] + update_task_list = [] for plugin in plugin_list: if plugin.module_path not in module2id: create_list.append(plugin) else: plugin.id = module2id[plugin.module_path] - await plugin.save( - update_fields=[ - "name", - "author", - "version", - "admin_level", - "plugin_type", - "is_show", - ] + update_task_list.append( + plugin.save( + update_fields=[ + "name", + "author", + "version", + "admin_level", + "plugin_type", + "is_show", + ] + ) ) update_list.append(plugin) if create_list: await PluginInfo.bulk_create(create_list, 10) + if update_task_list: + await asyncio.gather(*update_task_list) # if update_list: # # TODO: 批量更新无法更新plugin_type: tortoise.exceptions.OperationalError: # column "superuser" does not exist diff --git a/zhenxun/builtin_plugins/init/manager.py b/zhenxun/builtin_plugins/init/manager.py index d6ffa223..9fab6a1d 100644 --- a/zhenxun/builtin_plugins/init/manager.py +++ b/zhenxun/builtin_plugins/init/manager.py @@ -205,7 +205,7 @@ class Manager: self.cd_data: dict[str, PluginCdBlock] = {} if self.cd_file.exists(): with open(self.cd_file, encoding="utf8") as f: - temp = _yaml.load(f) + temp = _yaml.load(f) or {} if "PluginCdLimit" in temp.keys(): for k, v in temp["PluginCdLimit"].items(): if "." in k: @@ -216,7 +216,7 @@ class Manager: self.block_data: dict[str, BaseBlock] = {} if self.block_file.exists(): with open(self.block_file, encoding="utf8") as f: - temp = _yaml.load(f) + temp = _yaml.load(f) or {} if "PluginBlockLimit" in temp.keys(): for k, v in temp["PluginBlockLimit"].items(): if "." 
in k: @@ -227,7 +227,7 @@ class Manager: self.count_data: dict[str, PluginCountBlock] = {} if self.count_file.exists(): with open(self.count_file, encoding="utf8") as f: - temp = _yaml.load(f) + temp = _yaml.load(f) or {} if "PluginCountLimit" in temp.keys(): for k, v in temp["PluginCountLimit"].items(): if "." in k: diff --git a/zhenxun/builtin_plugins/platform/qq/group_handle/data_source.py b/zhenxun/builtin_plugins/platform/qq/group_handle/data_source.py index 1190fb5e..9e8d7ea2 100644 --- a/zhenxun/builtin_plugins/platform/qq/group_handle/data_source.py +++ b/zhenxun/builtin_plugins/platform/qq/group_handle/data_source.py @@ -55,15 +55,17 @@ class GroupManager: if plugin_list := await PluginInfo.filter(default_status=False).all(): for plugin in plugin_list: block_plugin += f"<{plugin.module}," - group_info = await bot.get_group_info(group_id=group_id, no_cache=True) - await GroupConsole.create( + group_info = await bot.get_group_info(group_id=group_id) + await GroupConsole.update_or_create( group_id=group_info["group_id"], - group_name=group_info["group_name"], - max_member_count=group_info["max_member_count"], - member_count=group_info["member_count"], - group_flag=1, - block_plugin=block_plugin, - platform="qq", + defaults={ + "group_name": group_info["group_name"], + "max_member_count": group_info["max_member_count"], + "member_count": group_info["member_count"], + "group_flag": 1, + "block_plugin": block_plugin, + "platform": "qq", + }, ) @classmethod @@ -145,7 +147,7 @@ class GroupManager: e=e, ) raise ForceAddGroupError("强制拉群或未有群信息,退出群聊失败...") from e - await GroupConsole.filter(group_id=group_id).delete() + # await GroupConsole.filter(group_id=group_id).delete() raise ForceAddGroupError(f"触发强制入群保护,已成功退出群聊 {group_id}...") else: await cls.__handle_add_group(bot, group_id, group) diff --git a/zhenxun/builtin_plugins/platform/qq_api/ug_watch.py b/zhenxun/builtin_plugins/platform/qq_api/ug_watch.py index 4e7a708c..4435e880 100644 --- 
a/zhenxun/builtin_plugins/platform/qq_api/ug_watch.py +++ b/zhenxun/builtin_plugins/platform/qq_api/ug_watch.py @@ -1,4 +1,4 @@ -from nonebot.message import run_preprocessor +from nonebot import on_message from nonebot_plugin_uninfo import Uninfo from zhenxun.models.friend_user import FriendUser @@ -8,24 +8,27 @@ from zhenxun.services.log import logger from zhenxun.utils.platform import PlatformUtils -@run_preprocessor -async def do_something(session: Uninfo): +def rule(session: Uninfo) -> bool: + return PlatformUtils.is_qbot(session) + + +_matcher = on_message(priority=999, block=False, rule=rule) + + +@_matcher.handle() +async def _(session: Uninfo): platform = PlatformUtils.get_platform(session) if session.group: if not await GroupConsole.exists(group_id=session.group.id): await GroupConsole.create(group_id=session.group.id) - logger.info("添加当前群组ID信息" "", session=session) - - if not await GroupInfoUser.exists( - user_id=session.user.id, group_id=session.group.id - ): - await GroupInfoUser.create( - user_id=session.user.id, group_id=session.group.id, platform=platform - ) - logger.info("添加当前用户群组ID信息", "", session=session) + logger.info("添加当前群组ID信息", session=session) + await GroupInfoUser.update_or_create( + user_id=session.user.id, + group_id=session.group.id, + platform=PlatformUtils.get_platform(session), + ) elif not await FriendUser.exists(user_id=session.user.id, platform=platform): - try: - await FriendUser.create(user_id=session.user.id, platform=platform) - logger.info("添加当前好友用户信息", "", session=session) - except Exception as e: - logger.error("添加当前好友用户信息失败", session=session, e=e) + await FriendUser.create( + user_id=session.user.id, platform=PlatformUtils.get_platform(session) + ) + logger.info("添加当前好友用户信息", "", session=session) diff --git a/zhenxun/builtin_plugins/scripts.py b/zhenxun/builtin_plugins/scripts.py deleted file mode 100644 index b5fca300..00000000 --- a/zhenxun/builtin_plugins/scripts.py +++ /dev/null @@ -1,30 +0,0 @@ -from 
zhenxun.models.group_console import GroupConsole -from zhenxun.utils.manager.priority_manager import PriorityLifecycle - - -@PriorityLifecycle.on_startup(priority=5) -async def _(): - """开启/禁用插件格式修改""" - _, is_create = await GroupConsole.get_or_create(group_id=133133133) - """标记""" - if is_create: - data_list = [] - for group in await GroupConsole.all(): - if group.block_plugin: - if modules := group.block_plugin.split(","): - block_plugin = "".join( - (f"{module}," if module.startswith("<") else f"<{module},") - for module in modules - if module.strip() - ) - group.block_plugin = block_plugin.replace("<,", "") - if group.block_task: - if modules := group.block_task.split(","): - block_task = "".join( - (f"{module}," if module.startswith("<") else f"<{module},") - for module in modules - if module.strip() - ) - group.block_task = block_task.replace("<,", "") - data_list.append(group) - await GroupConsole.bulk_update(data_list, ["block_plugin", "block_task"], 10) diff --git a/zhenxun/builtin_plugins/statistics/_data_source.py b/zhenxun/builtin_plugins/statistics/_data_source.py index 2ceb4590..81e2b035 100644 --- a/zhenxun/builtin_plugins/statistics/_data_source.py +++ b/zhenxun/builtin_plugins/statistics/_data_source.py @@ -44,9 +44,7 @@ class StatisticsManage: title = f"{user.user_name if user else user_id} {day_type}功能调用统计" elif group_id: """查群组""" - group = await GroupConsole.get_or_none( - group_id=group_id, channel_id__isnull=True - ) + group = await GroupConsole.get_group(group_id=group_id) title = f"{group.group_name if group else group_id} {day_type}功能调用统计" else: title = "功能调用统计" diff --git a/zhenxun/builtin_plugins/superuser/group_manage.py b/zhenxun/builtin_plugins/superuser/group_manage.py index fb8c0d2e..b2f77f47 100644 --- a/zhenxun/builtin_plugins/superuser/group_manage.py +++ b/zhenxun/builtin_plugins/superuser/group_manage.py @@ -163,7 +163,7 @@ async def _(session: EventSession, arparma: Arparma, state: T_State, level: int) 
@_matcher.assign("super-handle", parameterless=[CheckGroupId()]) async def _(session: EventSession, arparma: Arparma, state: T_State): gid = state["group_id"] - group = await GroupConsole.get_or_none(group_id=gid) + group = await GroupConsole.get_group(group_id=gid) if not group: await MessageUtils.build_message("群组信息不存在, 请更新群组信息...").finish() s = "删除" if arparma.find("delete") else "添加" @@ -177,7 +177,9 @@ async def _(session: EventSession, arparma: Arparma, state: T_State): async def _(session: EventSession, arparma: Arparma, state: T_State): gid = state["group_id"] await GroupConsole.update_or_create( - group_id=gid, defaults={"group_flag": 0 if arparma.find("delete") else 1} + group_id=gid, + channel_id__isnull=True, + defaults={"group_flag": 0 if arparma.find("delete") else 1}, ) s = "删除" if arparma.find("delete") else "添加" await MessageUtils.build_message(f"{s}群认证成功!").send(reply_to=True) diff --git a/zhenxun/builtin_plugins/web_ui/api/tabs/main/data_source.py b/zhenxun/builtin_plugins/web_ui/api/tabs/main/data_source.py index e87647dd..2a783b22 100644 --- a/zhenxun/builtin_plugins/web_ui/api/tabs/main/data_source.py +++ b/zhenxun/builtin_plugins/web_ui/api/tabs/main/data_source.py @@ -119,7 +119,7 @@ class ApiDataSource: (await PlatformUtils.get_friend_list(select_bot.bot))[0] ) except Exception as e: - logger.warning("获取bot好友/群组信息失败...", "WebUi", e=e) + logger.warning("获取bot好友/群组数量失败...", "WebUi", e=e) select_bot.group_count = 0 select_bot.friend_count = 0 select_bot.status = await BotConsole.get_bot_status(select_bot.self_id) diff --git a/zhenxun/builtin_plugins/web_ui/api/tabs/manage/data_source.py b/zhenxun/builtin_plugins/web_ui/api/tabs/manage/data_source.py index 39de7736..0b068e17 100644 --- a/zhenxun/builtin_plugins/web_ui/api/tabs/manage/data_source.py +++ b/zhenxun/builtin_plugins/web_ui/api/tabs/manage/data_source.py @@ -250,7 +250,7 @@ class ApiDataSource: 返回: GroupDetail | None: 群组详情数据 """ - group = await 
GroupConsole.get_or_none(group_id=group_id) + group = await GroupConsole.get_group(group_id=group_id) if not group: return None like_plugin = await cls.__get_group_detail_like_plugin(group_id) diff --git a/zhenxun/builtin_plugins/web_ui/api/tabs/system/__init__.py b/zhenxun/builtin_plugins/web_ui/api/tabs/system/__init__.py index 949a69de..778ca846 100644 --- a/zhenxun/builtin_plugins/web_ui/api/tabs/system/__init__.py +++ b/zhenxun/builtin_plugins/web_ui/api/tabs/system/__init__.py @@ -45,6 +45,7 @@ async def _(path: str | None = None) -> Result[list[DirFile]]: mtime=file_path.stat().st_mtime, ) ) + data_list.sort(key=lambda f: f.name) return Result.ok(data_list) except Exception as e: return Result.fail(f"获取文件列表失败: {e!s}") diff --git a/zhenxun/configs/config.py b/zhenxun/configs/config.py index 83937201..9f4b5229 100644 --- a/zhenxun/configs/config.py +++ b/zhenxun/configs/config.py @@ -13,8 +13,8 @@ class BotSetting(BaseModel): """回复时NICKNAME""" system_proxy: str | None = None """系统代理""" - db_url: str = "" - """数据库链接""" + db_url: str = "sqlite:data/zhenxun.db" + """数据库链接, 默认值为sqlite:data/zhenxun.db""" platform_superusers: dict[str, list[str]] = Field(default_factory=dict) """平台超级用户""" qbot_id_data: dict[str, str] = Field(default_factory=dict) diff --git a/zhenxun/configs/utils/models.py b/zhenxun/configs/utils/models.py index bb51a6c5..bc850cc4 100644 --- a/zhenxun/configs/utils/models.py +++ b/zhenxun/configs/utils/models.py @@ -155,8 +155,6 @@ class AICallableProperties(BaseModel): """参数类型""" description: str """参数描述""" - enums: list[str] | None = None - """参数枚举""" class AICallableParam(BaseModel): diff --git a/zhenxun/models/ban_console.py b/zhenxun/models/ban_console.py index 39907ff0..a6f5c3bb 100644 --- a/zhenxun/models/ban_console.py +++ b/zhenxun/models/ban_console.py @@ -1,10 +1,12 @@ import time +from typing import ClassVar from typing_extensions import Self from tortoise import fields from zhenxun.services.db_context import Model from 
zhenxun.services.log import logger +from zhenxun.utils.enum import CacheType, DbLockType from zhenxun.utils.exception import UserAndGroupIsNone @@ -27,6 +29,15 @@ class BanConsole(Model): class Meta: # pyright: ignore [reportIncompatibleVariableOverride] table = "ban_console" table_description = "封禁人员/群组数据表" + unique_together = ("user_id", "group_id") + indexes = [("user_id",), ("group_id",)] # noqa: RUF012 + + cache_type = CacheType.BAN + """缓存类型""" + cache_key_field = ("user_id", "group_id") + """缓存键字段""" + enable_lock: ClassVar[list[DbLockType]] = [DbLockType.CREATE, DbLockType.UPSERT] + """开启锁""" @classmethod async def _get_data(cls, user_id: str | None, group_id: str | None) -> Self | None: @@ -46,12 +57,12 @@ class BanConsole(Model): raise UserAndGroupIsNone() if user_id: return ( - await cls.get_or_none(user_id=user_id, group_id=group_id) + await cls.safe_get_or_none(user_id=user_id, group_id=group_id) if group_id - else await cls.get_or_none(user_id=user_id, group_id__isnull=True) + else await cls.safe_get_or_none(user_id=user_id, group_id__isnull=True) ) else: - return await cls.get_or_none(user_id="", group_id=group_id) + return await cls.safe_get_or_none(user_id="", group_id=group_id) @classmethod async def check_ban_level( @@ -167,3 +178,32 @@ class BanConsole(Model): await user.delete() return True return False + + @classmethod + async def get_ban( + cls, + *, + id: int | None = None, + user_id: str | None = None, + group_id: str | None = None, + ) -> Self | None: + """安全地获取ban记录 + + 参数: + id: 记录id + user_id: 用户id + group_id: 群组id + + 返回: + Self | None: ban记录 + """ + if id is not None: + return await cls.safe_get_or_none(id=id) + return await cls._get_data(user_id, group_id) + + @classmethod + async def _run_script(cls): + return [ + "CREATE INDEX idx_ban_console_user_id ON ban_console(user_id);", + "CREATE INDEX idx_ban_console_group_id ON ban_console(group_id);", + ] diff --git a/zhenxun/models/bot_console.py b/zhenxun/models/bot_console.py index 
30e981ef..01a93535 100644 --- a/zhenxun/models/bot_console.py +++ b/zhenxun/models/bot_console.py @@ -3,6 +3,7 @@ from typing import Literal, overload from tortoise import fields from zhenxun.services.db_context import Model +from zhenxun.utils.enum import CacheType class BotConsole(Model): @@ -29,6 +30,11 @@ class BotConsole(Model): table = "bot_console" table_description = "Bot数据表" + cache_type = CacheType.BOT + """缓存类型""" + cache_key_field = "bot_id" + """缓存键字段""" + @staticmethod def format(name: str) -> str: return f"<{name}," diff --git a/zhenxun/models/group_console.py b/zhenxun/models/group_console.py index 08406fa7..ad36dccd 100644 --- a/zhenxun/models/group_console.py +++ b/zhenxun/models/group_console.py @@ -1,4 +1,4 @@ -from typing import Any, cast, overload +from typing import Any, ClassVar, cast, overload from typing_extensions import Self from tortoise import fields @@ -6,8 +6,9 @@ from tortoise.backends.base.client import BaseDBAsyncClient from zhenxun.models.plugin_info import PluginInfo from zhenxun.models.task_info import TaskInfo +from zhenxun.services.cache import CacheRoot from zhenxun.services.db_context import Model -from zhenxun.utils.enum import PluginType +from zhenxun.utils.enum import CacheType, DbLockType, PluginType def add_disable_marker(name: str) -> str: @@ -86,6 +87,16 @@ class GroupConsole(Model): table = "group_console" table_description = "群组信息表" unique_together = ("group_id", "channel_id") + indexes = [ # noqa: RUF012 + ("group_id",) + ] + + cache_type = CacheType.GROUPS + """缓存类型""" + cache_key_field = ("group_id", "channel_id") + """缓存键字段""" + enable_lock: ClassVar[list[DbLockType]] = [DbLockType.CREATE, DbLockType.UPSERT] + """开启锁""" @classmethod async def _get_task_modules(cls, *, default_status: bool) -> list[str]: @@ -116,6 +127,18 @@ class GroupConsole(Model): ).values_list("module", flat=True), ) + @classmethod + async def _update_cache(cls, instance): + """更新缓存 + + 参数: + instance: 需要更新缓存的实例 + """ + if cache_type := 
cls.get_cache_type(): + key = cls.get_cache_key(instance) + if key is not None: + await CacheRoot.invalidate_cache(cache_type, key) + @classmethod async def create( cls, using_db: BaseDBAsyncClient | None = None, **kwargs: Any @@ -129,6 +152,9 @@ class GroupConsole(Model): if task_modules or plugin_modules: await cls._update_modules(group, task_modules, plugin_modules, using_db) + # 更新缓存 + await cls._update_cache(group) + return group @classmethod @@ -180,6 +206,10 @@ class GroupConsole(Model): if task_modules or plugin_modules: await cls._update_modules(group, task_modules, plugin_modules, using_db) + # 更新缓存 + if is_create: + await cls._update_cache(group) + return group, is_create @classmethod @@ -202,24 +232,39 @@ class GroupConsole(Model): if task_modules or plugin_modules: await cls._update_modules(group, task_modules, plugin_modules, using_db) + # 更新缓存 + await cls._update_cache(group) + return group, is_create @classmethod async def get_group( - cls, group_id: str, channel_id: str | None = None + cls, + group_id: str, + channel_id: str | None = None, + clean_duplicates: bool = True, ) -> Self | None: """获取群组 参数: group_id: 群组id - channel_id: 频道id. 
+ channel_id: 频道id + clean_duplicates: 是否删除重复的记录,仅保留最新的 返回: Self: GroupConsole """ if channel_id: - return await cls.get_or_none(group_id=group_id, channel_id=channel_id) - return await cls.get_or_none(group_id=group_id, channel_id__isnull=True) + return await cls.safe_get_or_none( + group_id=group_id, + channel_id=channel_id, + clean_duplicates=clean_duplicates, + ) + return await cls.safe_get_or_none( + group_id=group_id, + channel_id__isnull=True, + clean_duplicates=clean_duplicates, + ) @classmethod async def is_super_group(cls, group_id: str) -> bool: @@ -303,6 +348,9 @@ class GroupConsole(Model): if update_fields: await group.save(update_fields=update_fields) + # 更新缓存 + await cls._update_cache(group) + @classmethod async def set_unblock_plugin( cls, @@ -339,6 +387,9 @@ class GroupConsole(Model): if update_fields: await group.save(update_fields=update_fields) + # 更新缓存 + await cls._update_cache(group) + @classmethod async def is_normal_block_plugin( cls, group_id: str, module: str, channel_id: str | None = None @@ -442,6 +493,9 @@ class GroupConsole(Model): if update_fields: await group.save(update_fields=update_fields) + # 更新缓存 + await cls._update_cache(group) + @classmethod async def set_unblock_task( cls, @@ -476,6 +530,9 @@ class GroupConsole(Model): if update_fields: await group.save(update_fields=update_fields) + # 更新缓存 + await cls._update_cache(group) + @classmethod def _run_script(cls): return [ @@ -483,4 +540,6 @@ class GroupConsole(Model): " character varying(255) NOT NULL DEFAULT '';", "ALTER TABLE group_console ADD superuser_block_task" " character varying(255) NOT NULL DEFAULT '';", + "CREATE INDEX idx_group_console_group_id ON group_console(group_id);", + "CREATE INDEX idx_group_console_group_null_channel ON group_console(group_id) WHERE channel_id IS NULL;", # 单独创建channel为空的索引 # noqa: E501 ] diff --git a/zhenxun/models/level_user.py b/zhenxun/models/level_user.py index 4269f315..644c38d3 100644 --- a/zhenxun/models/level_user.py +++ 
b/zhenxun/models/level_user.py @@ -1,6 +1,7 @@ from tortoise import fields from zhenxun.services.db_context import Model +from zhenxun.utils.enum import CacheType class LevelUser(Model): @@ -20,6 +21,11 @@ class LevelUser(Model): table_description = "用户权限数据库" unique_together = ("user_id", "group_id") + cache_type = CacheType.LEVEL + """缓存类型""" + cache_key_field = ("user_id", "group_id") + """缓存键字段""" + @classmethod async def get_user_level(cls, user_id: str, group_id: str | None) -> int: """获取用户在群内的等级 @@ -53,6 +59,9 @@ class LevelUser(Model): level: 权限等级 group_flag: 是否被自动更新刷新权限 0:是, 1:否. """ + if await cls.exists(user_id=user_id, group_id=group_id, user_level=level): + # 权限相同时跳过 + return await cls.update_or_create( user_id=user_id, group_id=group_id, diff --git a/zhenxun/models/plugin_info.py b/zhenxun/models/plugin_info.py index 862aea8c..177ab70e 100644 --- a/zhenxun/models/plugin_info.py +++ b/zhenxun/models/plugin_info.py @@ -4,7 +4,7 @@ from tortoise import fields from zhenxun.models.plugin_limit import PluginLimit # noqa: F401 from zhenxun.services.db_context import Model -from zhenxun.utils.enum import BlockType, PluginType +from zhenxun.utils.enum import BlockType, CacheType, PluginType class PluginInfo(Model): @@ -59,6 +59,11 @@ class PluginInfo(Model): table = "plugin_info" table_description = "插件基本信息" + cache_type = CacheType.PLUGINS + """缓存类型""" + cache_key_field = "module" + """缓存键字段""" + @classmethod async def get_plugin( cls, load_status: bool = True, filter_parent: bool = True, **kwargs diff --git a/zhenxun/models/user_console.py b/zhenxun/models/user_console.py index b590a802..27cd582f 100644 --- a/zhenxun/models/user_console.py +++ b/zhenxun/models/user_console.py @@ -2,7 +2,7 @@ from tortoise import fields from zhenxun.models.goods_info import GoodsInfo from zhenxun.services.db_context import Model -from zhenxun.utils.enum import GoldHandle +from zhenxun.utils.enum import CacheType, GoldHandle from zhenxun.utils.exception import GoodsNotFound, 
InsufficientGold from .user_gold_log import UserGoldLog @@ -29,6 +29,12 @@ class UserConsole(Model): class Meta: # pyright: ignore [reportIncompatibleVariableOverride] table = "user_console" table_description = "用户数据表" + indexes = [("user_id",), ("uid",)] # noqa: RUF012 + + cache_type = CacheType.USERS + """缓存类型""" + cache_key_field = "user_id" + """缓存键字段""" @classmethod async def get_user(cls, user_id: str, platform: str | None = None) -> "UserConsole": @@ -193,3 +199,10 @@ class UserConsole(Model): if goods := await GoodsInfo.get_or_none(goods_name=name): return await cls.use_props(user_id, goods.uuid, num, platform) raise GoodsNotFound("未找到商品...") + + @classmethod + async def _run_script(cls): + return [ + "CREATE INDEX idx_user_console_user_id ON user_console(user_id);", + "CREATE INDEX idx_user_console_uid ON user_console(uid);", + ] diff --git a/zhenxun/services/cache/__init__.py b/zhenxun/services/cache/__init__.py new file mode 100644 index 00000000..76b05a5c --- /dev/null +++ b/zhenxun/services/cache/__init__.py @@ -0,0 +1,1065 @@ +""" +缓存系统模块 + +提供统一的缓存访问接口,支持内存缓存和Redis缓存 + +使用示例: +1. 使用Cache类进行缓存操作 +```python +from zhenxun.services.cache import Cache +from zhenxun.utils.enum import CacheType + +# 创建缓存访问对象 +level_cache = Cache[list[LevelUser]](CacheType.LEVEL) + +# 获取缓存数据 +users = await level_cache.get({"user_id": "123", "group_id": "456"}) + +# 设置缓存数据 +await level_cache.set({"user_id": "123", "group_id": "456"}, users) +``` + +2. 使用CacheDict作为全局字典 +```python +from zhenxun.services.cache.cache_containers import CacheDict + +# 创建缓存字典(默认永不过期) +config_dict = CacheDict("global_config") + +# 创建有过期时间的缓存字典(1小时后过期) +temp_dict = CacheDict("temp_config", expire=3600) + +# 使用字典操作 +config_dict["key"] = "value" +value = config_dict["key"] + +# 保存缓存数据(可选) +await config_dict.save() +``` + +3. 
使用CacheList作为全局列表 +```python +from zhenxun.services.cache.cache_containers import CacheList + +# 创建缓存列表(默认永不过期) +message_list = CacheList("recent_messages") + +# 创建有过期时间的缓存列表(30分钟后过期) +temp_list = CacheList("temp_messages", expire=1800) + +# 使用列表操作 +message_list.append("新消息") +message = message_list[0] + +# 保存缓存数据(可选) +await message_list.save() +``` +""" + +import asyncio +from collections.abc import Callable +from datetime import datetime +from functools import wraps +from typing import Any, ClassVar, Generic, TypeVar, get_type_hints + +from aiocache import Cache as AioCache +from aiocache.base import BaseCache +from aiocache.serializers import JsonSerializer +import nonebot +from nonebot.compat import model_dump +from nonebot.utils import is_coroutine_callable +from pydantic import BaseModel + +from zhenxun.services.log import logger + +from .cache_containers import CacheDict, CacheList +from .config import ( + CACHE_KEY_PREFIX, + CACHE_KEY_SEPARATOR, + DEFAULT_EXPIRE, + LOG_COMMAND, + SPECIAL_KEY_FORMATS, + CacheMode, +) + +__all__ = [ + "Cache", + "CacheData", + "CacheDict", + "CacheList", + "CacheManager", + "CacheRegistry", + "CacheRoot", +] + +T = TypeVar("T") + + +class Config(BaseModel): + """缓存配置""" + + cache_mode: str = CacheMode.NONE + """缓存模式: MEMORY(内存缓存), REDIS(Redis缓存), NONE(不使用缓存)""" + redis_host: str | None = None + """redis地址""" + redis_port: int | None = None + """redis端口""" + redis_password: str | None = None + """redis密码""" + redis_expire: int = DEFAULT_EXPIRE + """redis过期时间""" + + +# 获取配置 +driver = nonebot.get_driver() +cache_config = nonebot.get_plugin_config(Config) + + +class CacheException(Exception): + """缓存相关异常""" + + def __init__(self, info: str): + self.info = info + + def __str__(self) -> str: + return self.info + + +class CacheModel(BaseModel): + """缓存数据模型""" + + name: str + """缓存名称""" + expire: int = DEFAULT_EXPIRE + """过期时间(秒)""" + result_type: type | None = None + """结果类型""" + key_format: str | None = None + """键格式""" + + class 
Config: + arbitrary_types_allowed = True + + +""" +CacheData类是缓存系统的核心组件,它负责管理单个缓存项的数据和生命周期。 + +设计思路: +1. 每个CacheData实例代表一个具名的缓存项,如"用户列表"、"配置数据"等 +2. 它提供了数据的懒加载、自动过期和持久化等功能 +3. 可以通过func参数提供一个获取数据的函数,在数据不存在或过期时自动调用 +4. 支持直接设置_data属性,方便外部直接操作数据 + +主要用途: +1. 作为CacheDict和CacheList的后端存储 +2. 被CacheManager管理,实现统一的缓存生命周期控制 +3. 提供数据过期和自动刷新机制 + +通常情况下,用户不需要直接使用CacheData,而是通过Cache、CacheDict或CacheList来操作缓存。 +""" + + +class CacheData: + """缓存数据类""" + + def __init__( + self, + name: str, + func: Callable, + expire: int = DEFAULT_EXPIRE, + lazy_load: bool = True, + cache: BaseCache | AioCache | None = None, + ): + """初始化缓存数据 + + 参数: + name: 缓存名称 + func: 获取数据的函数 + expire: 过期时间(秒) + lazy_load: 是否延迟加载 + cache: 缓存后端 + """ + self.name = name.upper() + self.func = func + self.expire = expire + self.lazy_load = lazy_load + self.cache = cache + self._data = None + self._last_update = 0 + + # 如果不是延迟加载,立即加载数据 + if not lazy_load: + import asyncio + + try: + loop = asyncio.get_event_loop() + if not loop.is_running(): + loop.run_until_complete(self.get_data()) + except Exception: + pass + + async def get_data(self) -> Any: + """获取数据 + + 返回: + Any: 缓存数据 + """ + # 检查是否需要更新 + now = datetime.now().timestamp() + if self._data is None or ( + self.expire > 0 and now - self._last_update > self.expire + ): + # 更新数据 + try: + self._data = await self.func() + self._last_update = now + except Exception as e: + logger.error(f"获取缓存数据 {self.name} 失败", LOG_COMMAND, e=e) + + return self._data + + async def set_data(self, data: Any) -> bool: + """设置数据 + + 参数: + data: 缓存数据 + + 返回: + bool: 是否成功 + """ + try: + self._data = data + self._last_update = datetime.now().timestamp() + # 如果有缓存后端,保存到缓存 + if self.cache and cache_config.cache_mode != CacheMode.NONE: + await self.cache.set(self.name, data, ttl=self.expire) # type: ignore + return True + except Exception as e: + logger.error(f"设置缓存数据 {self.name} 失败", LOG_COMMAND, e=e) + return False + + async def clear(self) -> bool: + """清除数据 + + 返回: + bool: 是否成功 + """ + try: 
+ self._data = None + self._last_update = 0 + # 如果有缓存后端,清除缓存 + if self.cache and cache_config.cache_mode != CacheMode.NONE: + await self.cache.delete(self.name) # type: ignore + return True + except Exception as e: + logger.error(f"清除缓存数据 {self.name} 失败", LOG_COMMAND, e=e) + return False + + +class CacheManager: + """缓存管理器""" + + _instance: ClassVar["CacheManager | None"] = None + _cache_backend: BaseCache | AioCache | None = None + _registry: ClassVar[dict[str, CacheModel]] = {} + _data: ClassVar[dict[str, CacheData]] = {} + _list_caches: ClassVar[dict[str, "CacheList"]] = {} + _dict_caches: ClassVar[dict[str, "CacheDict"]] = {} + _enabled = False # 缓存启用标记 + + def __new__(cls) -> "CacheManager": + """单例模式""" + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + @property + def enabled(self) -> bool: + """获取缓存启用状态""" + return self.__class__._enabled + + @enabled.setter + def enabled(self, value: bool): + """设置缓存启用状态""" + self.__class__._enabled = value + + def enable(self): + """启用缓存""" + self.__class__._enabled = True + logger.info("缓存功能已启用", LOG_COMMAND) + + def disable(self): + """禁用缓存""" + self.__class__._enabled = False + logger.info("缓存功能已禁用", LOG_COMMAND) + + def cache_dict(self, cache_type: str, expire: int = 0) -> CacheDict: + """获取缓存字典""" + if cache_type not in self._dict_caches: + self._dict_caches[cache_type] = CacheDict(cache_type, expire) + return self._dict_caches[cache_type] + + def cache_list(self, cache_type: str, expire: int = 0) -> CacheList: + """获取缓存列表""" + if cache_type not in self._list_caches: + self._list_caches[cache_type] = CacheList(cache_type, expire) + return self._list_caches[cache_type] + + def listener(self, cache_type: str): + """缓存监听器装饰器 + + 在方法调用后自动刷新缓存数据 + + 参数: + cache_type: 缓存类型 + + 返回: + Callable: 装饰器 + """ + + def decorator(func: Callable): + @wraps(func) + async def wrapper(cls, *args, **kwargs): + # 执行原函数 + result = await func(cls, *args, **kwargs) + + obj = None + # 如果启用了缓存,自动刷新缓存 
+ if cache_config.cache_mode != CacheMode.NONE: + # 根据返回值类型处理 + if isinstance(result, tuple) and len(result) > 0: + # 处理返回元组的情况,如 update_or_create 返回 (obj, created) + obj = result[0] + else: + # 处理返回单个对象的情况 + obj = result + + # 获取缓存键并刷新缓存 + if ( + obj + and hasattr(cls, "get_cache_key") + and hasattr(obj, cls.get_cache_key_field()) + ): + key = cls.get_cache_key(obj) + if key is not None: + await self.invalidate_cache(cache_type, key) + + return result + + return wrapper + + return decorator + + async def get_cache(self, cache_type: str) -> Any: + """获取指定类型的缓存对象 + + 此方法返回一个简单的缓存对象,具有 update 方法 + + 参数: + cache_type: 缓存类型 + + 返回: + Any: 缓存对象 + """ + + class CacheAdapter: + """缓存适配器""" + + def __init__(self, cache_manager: CacheManager, cache_type: str): + self.cache_manager = cache_manager + self.cache_type = cache_type + + async def update(self, key: Any, value: Any) -> None: + """更新缓存 + + 参数: + key: 缓存键 + value: 缓存值 + """ + # 先清除旧缓存 + await self.cache_manager.invalidate_cache(self.cache_type, key) + + # 如果需要,可以在这里添加重新设置缓存的逻辑 + # 目前我们只清除缓存,让下次查询时自动重建 + + return ( + CacheAdapter(self, cache_type) + if cache_config.cache_mode != CacheMode.NONE + else None + ) + + @property + def cache_backend(self) -> BaseCache | AioCache: + """获取缓存后端""" + if self._cache_backend is None: + try: + from aiocache import RedisCache, SimpleMemoryCache + + if cache_config.cache_mode == CacheMode.NONE: + # 使用内存缓存但禁用持久化 + self._cache_backend = SimpleMemoryCache( + serializer=JsonSerializer(), + namespace=CACHE_KEY_PREFIX, + timeout=30, + ttl=0, # 设置为0,不缓存 + ) + logger.info("缓存功能已禁用,使用非持久化内存缓存", LOG_COMMAND) + elif ( + cache_config.cache_mode == CacheMode.REDIS + and cache_config.redis_host + ): + # 使用Redis缓存 + self._cache_backend = RedisCache( + serializer=JsonSerializer(), + namespace=CACHE_KEY_PREFIX, + timeout=30, + ttl=cache_config.redis_expire, + endpoint=cache_config.redis_host, + port=cache_config.redis_port, + password=cache_config.redis_password, + ) + logger.info( + f"使用Redis缓存,地址: 
{cache_config.redis_host}", LOG_COMMAND + ) + else: + # 默认使用内存缓存 + self._cache_backend = SimpleMemoryCache( + serializer=JsonSerializer(), + namespace=CACHE_KEY_PREFIX, + timeout=30, + ttl=cache_config.redis_expire, + ) + logger.info("使用内存缓存", LOG_COMMAND) + except ImportError: + logger.error("导入aiocache模块失败,使用内存缓存", LOG_COMMAND) + # 使用内存缓存 + self._cache_backend = AioCache( + cache_class=AioCache.MEMORY, + serializer=JsonSerializer(), + namespace=CACHE_KEY_PREFIX, + timeout=30, + ttl=cache_config.redis_expire, + ) + return self._cache_backend + + @property + def _cache(self) -> BaseCache | AioCache: + """获取缓存后端(别名)""" + return self.cache_backend + + async def get_cache_data(self, name: str) -> Any: + """获取缓存数据 + + 参数: + name: 缓存名称 + + 返回: + Any: 缓存数据 + """ + name = name.upper() + # 检查是否存在缓存数据 + if name in self._data: + return await self._data[name].get_data() + + # 尝试从缓存后端获取 + if cache_config.cache_mode != CacheMode.NONE: + try: + data = await self.cache_backend.get(name) # type: ignore + if data is not None: + return data + except Exception as e: + logger.error(f"从缓存后端获取数据 {name} 失败", LOG_COMMAND, e=e) + return None + + async def invalidate_cache( + self, cache_type: str, key: str | dict[str, Any] | None = None + ) -> bool: + """使指定类型的缓存失效 + + 当数据库中的数据发生变化时,调用此方法清除对应类型的缓存 + + 参数: + cache_type: 缓存类型 + key: 缓存键或键参数,为None时清除该类型的所有缓存 + + 返回: + bool: 是否成功 + """ + # 如果缓存被禁用或缓存模式为NONE,直接返回True + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return True + + try: + if key is not None: + # 只清除特定的缓存项 + cache_key = self._build_key(cache_type, key) + await self.cache_backend.delete(cache_key) # type: ignore + logger.debug(f"清除缓存: {cache_type}, 键: {key}", LOG_COMMAND) + return True + else: + # 清除指定类型的所有缓存 + logger.debug(f"清除所有 {cache_type} 缓存", LOG_COMMAND) + return await self.clear(cache_type) + except Exception as e: + if f"缓存类型 {cache_type} 不存在" not in str(e): + logger.warning(f"清除缓存 {cache_type} 失败", LOG_COMMAND, e=e) + return False + + async def get( 
+ self, cache_type: str, key: str | dict[str, Any], default: Any = None + ) -> Any: + """获取缓存数据 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + default: 默认值 + + 返回: + Any: 缓存数据,如果不存在返回默认值 + """ + from zhenxun.services.db_context import DB_TIMEOUT_SECONDS + + # 如果缓存被禁用或缓存模式为NONE,直接返回默认值 + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return default + cache_key = None + try: + cache_key = self._build_key(cache_type, key) + data = await asyncio.wait_for( + self.cache_backend.get(cache_key), # type: ignore + timeout=DB_TIMEOUT_SECONDS, + ) + + if data is None: + return default + + # 获取缓存模型 + model = self.get_model(cache_type) + + # 反序列化 + if model.result_type: + return self._deserialize_value(data, model.result_type) + return data + except asyncio.TimeoutError: + logger.error(f"获取缓存 {cache_type}:{cache_key} 超时", LOG_COMMAND) + return default + except Exception as e: + logger.error(f"获取缓存 {cache_type} 失败", LOG_COMMAND, e=e) + return default + + async def set( + self, + cache_type: str, + key: str | dict[str, Any], + value: Any, + expire: int | None = None, + ) -> bool: + """设置缓存数据 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + value: 值 + expire: 过期时间(秒),为None时使用默认值 + + 返回: + bool: 是否成功 + """ + from zhenxun.services.db_context import DB_TIMEOUT_SECONDS + + # 如果缓存被禁用或缓存模式为NONE,直接返回False + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return False + cache_key = None + try: + cache_key = self._build_key(cache_type, key) + model = self.get_model(cache_type) + + # 序列化 + serialized_value = self._serialize_value(value) + + # 设置过期时间 + ttl = expire if expire is not None else model.expire + + # 设置缓存 + await asyncio.wait_for( + self.cache_backend.set(cache_key, serialized_value, ttl=ttl), # type: ignore + timeout=DB_TIMEOUT_SECONDS, + ) + return True + except asyncio.TimeoutError: + logger.error(f"设置缓存 {cache_type}:{cache_key} 超时", LOG_COMMAND) + return False + except Exception as e: + logger.error(f"设置缓存 {cache_type} 失败", LOG_COMMAND, e=e) + return 
False + + async def delete(self, cache_type: str, key: str | dict[str, Any]) -> bool: + """删除缓存数据 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + + 返回: + bool: 是否成功 + """ + # 如果缓存被禁用或缓存模式为NONE,直接返回False + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return False + + try: + cache_key = self._build_key(cache_type, key) + await self.cache_backend.delete(cache_key) # type: ignore + return True + except Exception as e: + logger.error(f"删除缓存 {cache_type} 失败", LOG_COMMAND, e=e) + return False + + async def exists(self, cache_type: str, key: str | dict[str, Any]) -> bool: + """检查缓存是否存在 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + + 返回: + bool: 是否存在 + """ + # 如果缓存被禁用或缓存模式为NONE,直接返回False + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return False + + try: + cache_key = self._build_key(cache_type, key) + # 由于aiocache可能没有exists方法,使用get检查 + data = await self.cache_backend.get(cache_key) # type: ignore + return data is not None + except Exception as e: + logger.error(f"检查缓存 {cache_type} 是否存在失败", LOG_COMMAND, e=e) + return False + + async def clear(self, cache_type: str | None = None) -> bool: + """清除缓存 + + 参数: + cache_type: 缓存类型,为None时清除所有缓存 + + 返回: + bool: 是否成功 + """ + # 如果缓存被禁用或缓存模式为NONE,直接返回False + if not self.enabled or cache_config.cache_mode == CacheMode.NONE: + return False + + try: + if cache_type: + # 清除指定类型的缓存 + # pattern = f"{cache_type.upper()}{CACHE_KEY_SEPARATOR}*" + # 由于aiocache可能没有delete_pattern方法,使用其他方式清除 + # 这里简化处理,直接清除所有缓存 + await self.cache_backend.clear() # type: ignore + else: + # 清除所有缓存 + await self.cache_backend.clear() # type: ignore + return True + except Exception as e: + if f"缓存类型 {cache_type} 不存在" not in str(e): + logger.warning("清除缓存失败", LOG_COMMAND, e=e) + return False + + async def close(self): + """关闭缓存连接""" + if self._cache_backend: + try: + await self._cache_backend.close() # type: ignore + except (AttributeError, Exception) as e: + logger.warning(f"关闭缓存连接失败: {e}", LOG_COMMAND) + self._cache_backend = None + 
+ def register( + self, + name: str, + result_type: type | None = None, + expire: int = DEFAULT_EXPIRE, + key_format: str | None = None, + ) -> None: + """注册缓存类型 + + 参数: + name: 缓存名称 + result_type: 结果类型 + expire: 过期时间(秒) + key_format: 键格式 + """ + name = name.upper() + if name in self._registry: + logger.warning(f"缓存类型 {name} 已存在,将被覆盖", LOG_COMMAND) + + # 检查是否有特殊键格式 + if not key_format and name in SPECIAL_KEY_FORMATS: + key_format = SPECIAL_KEY_FORMATS[name] + + self._registry[name] = CacheModel( + name=name, + expire=expire, + result_type=result_type, + key_format=key_format, + ) + logger.debug( + f"注册缓存类型: {name}, 类型: {result_type}, 过期时间: {expire}秒", + LOG_COMMAND, + ) + + def get_model(self, name: str) -> CacheModel: + """获取缓存模型 + + 参数: + name: 缓存名称 + + 返回: + CacheModel: 缓存模型 + + 异常: + CacheException: 缓存类型不存在 + """ + name = name.upper() + if name not in self._registry: + raise CacheException(f"缓存类型 {name} 不存在") + return self._registry[name] + + def _build_key(self, cache_type: str, key: str | dict[str, Any]) -> str: + """构建缓存键 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + + 返回: + str: 完整缓存键 + """ + cache_type = cache_type.upper() + if cache_type not in self._registry: + raise CacheException(f"缓存类型 {cache_type} 不存在") + + model = self._registry[cache_type] + + # 如果key是字典,使用键格式 + if isinstance(key, dict) and model.key_format: + try: + formatted_key = model.key_format.format(**key) + except KeyError as e: + raise CacheException(f"键格式错误: {model.key_format}, 缺少参数: {e}") + return f"{cache_type}{CACHE_KEY_SEPARATOR}{formatted_key}" + + # 否则直接使用key + return f"{cache_type}{CACHE_KEY_SEPARATOR}{key}" + + def _serialize_value(self, value: Any) -> Any: + """序列化值 + + 参数: + value: 需要序列化的值 + + 返回: + Any: 序列化后的值 + """ + if value is None: + return None + + # 处理datetime + if isinstance(value, datetime): + return value.isoformat() + + # 处理Tortoise-ORM Model + if hasattr(value, "_meta") and hasattr(value, "__dict__"): + result = {} + for field in value._meta.fields: + try: + field_value 
= getattr(value, field) + # 跳过反向关系字段 + if isinstance(field_value, list | set) and hasattr( + field_value, "_related_name" + ): + continue + # 跳过外键关系字段 + if hasattr(field_value, "_meta"): + field_value = getattr( + field_value, value._meta.fields[field].related_name or "id" + ) + result[field] = self._serialize_value(field_value) + except AttributeError: + continue + return result + + # 处理Pydantic模型 + elif isinstance(value, BaseModel): + return model_dump(value) + elif isinstance(value, dict): + # 处理字典 + return {str(k): self._serialize_value(v) for k, v in value.items()} + elif isinstance(value, list | tuple | set): + # 处理列表、元组、集合 + return [self._serialize_value(item) for item in value] + elif isinstance(value, int | float | str | bool): + # 基本类型直接返回 + return value + else: + # 其他类型转换为字符串 + return str(value) + + def _deserialize_value(self, value: Any, target_type: type | None = None) -> Any: + """反序列化值 + + 参数: + value: 需要反序列化的值 + target_type: 目标类型 + + 返回: + Any: 反序列化后的值 + """ + if value is None: + return None + + # 如果是字典且指定了目标类型 + if isinstance(value, dict) and target_type: + # 处理Tortoise-ORM Model + if hasattr(target_type, "_meta"): + return self._deserialize_tortoise_model(value, target_type) + elif hasattr(target_type, "model_validate"): + return target_type.model_validate(value) + elif hasattr(target_type, "from_dict"): + return target_type.from_dict(value) + elif hasattr(target_type, "parse_obj"): + return target_type.parse_obj(value) + else: + return target_type(**value) + + # 处理列表类型 + if isinstance(value, list): + if not value: + return value + if ( + target_type + and hasattr(target_type, "__origin__") + and target_type.__origin__ is list + ): + item_type = target_type.__args__[0] + return [self._deserialize_value(item, item_type) for item in value] + return [self._deserialize_value(item) for item in value] + + # 处理字典类型 + if isinstance(value, dict): + return {k: self._deserialize_value(v) for k, v in value.items()} + + return value + + def 
_deserialize_tortoise_model(self, value: dict, target_type: type) -> Any: + """反序列化Tortoise-ORM模型 + + 参数: + value: 字典数据 + target_type: 目标类型 + + 返回: + Any: 反序列化后的模型实例 + """ + # 处理字段值 + processed_value = {} + for field_name, field_value in value.items(): + if field := target_type._meta.fields_map.get(field_name): + # 跳过反向关系字段 + if hasattr(field, "_related_name"): + continue + processed_value[field_name] = field_value + + # 创建模型实例 + instance = target_type() + # 设置字段值 + for field_name, field_value in processed_value.items(): + if field_name in target_type._meta.fields_map: + field = target_type._meta.fields_map[field_name] + # 设置字段值 + try: + if hasattr(field, "to_python_value"): + if not field.field_type: + logger.debug(f"字段 {field_name} 类型为空", LOG_COMMAND) + continue + field_value = field.to_python_value(field_value) + setattr(instance, field_name, field_value) + except Exception as e: + logger.warning(f"设置字段 {field_name} 失败", LOG_COMMAND, e=e) + + # 设置 _saved_in_db 标志 + instance._saved_in_db = True + return instance + + +# 全局缓存管理器实例 +CacheRoot = CacheManager() + + +class CacheRegistry: + """缓存注册器""" + + @staticmethod + def register( + name: str, + result_type: type | None = None, + expire: int = DEFAULT_EXPIRE, + key_format: str | None = None, + ): + """注册缓存类型 + + 参数: + name: 缓存名称 + result_type: 结果类型 + expire: 过期时间(秒) + key_format: 键格式 + """ + CacheRoot.register(name, result_type, expire, key_format) + + @staticmethod + def invalidate(cache_type: str, key: str | dict[str, Any]): + """使缓存失效的装饰器 + + 参数: + cache_type: 缓存类型 + key: 键或键参数 + + 返回: + Callable: 装饰器 + """ + + def decorator(func: Callable): + @wraps(func) + async def wrapper(*args, **kwargs): + # 执行函数 + result = ( + await func(*args, **kwargs) + if is_coroutine_callable(func) + else func(*args, **kwargs) + ) + + # 删除缓存 + if cache_config.cache_mode != CacheMode.NONE: + await CacheRoot.delete(cache_type, key) + + return result + + return wrapper + + return decorator + + +class Cache(Generic[T]): + """类型化缓存访问接口 + 
+ 示例: + ```python + from zhenxun.services.cache import Cache + from zhenxun.models.level_user import LevelUser + from zhenxun.utils.enum import CacheType + + # 创建缓存访问对象 + level_cache = Cache[list[LevelUser]](CacheType.LEVEL) + + # 获取缓存数据 + users = await level_cache.get({"user_id": "123", "group_id": "456"}) + + # 设置缓存数据 + await level_cache.set({"user_id": "123", "group_id": "456"}, users) + ``` + """ + + def __init__(self, cache_type: str): + """初始化缓存访问对象 + + 参数: + cache_type: 缓存类型 + """ + self.cache_type = cache_type.upper() + + # 尝试从类型注解获取结果类型 + try: + type_hints = get_type_hints(self.__class__) + if "T" in type_hints: + result_type = type_hints["T"] + # 确保缓存类型已注册 + try: + CacheRoot.get_model(self.cache_type) + except CacheException: + CacheRoot.register(self.cache_type, result_type) + except Exception: + pass + + async def get( + self, key: str | dict[str, Any], default: T | None = None + ) -> T | None: + """获取缓存数据 + + 参数: + key: 键或键参数 + default: 默认值 + + 返回: + T | None: 缓存数据,如果不存在返回默认值 + """ + return await CacheRoot.get(self.cache_type, key, default) + + async def set( + self, key: str | dict[str, Any], value: T, expire: int | None = None + ) -> bool: + """设置缓存数据 + + 参数: + key: 键或键参数 + value: 值 + expire: 过期时间(秒),为None时使用默认值 + + 返回: + bool: 是否成功 + """ + return await CacheRoot.set(self.cache_type, key, value, expire) + + async def delete(self, key: str | dict[str, Any]) -> bool: + """删除缓存数据 + + 参数: + key: 键或键参数 + + 返回: + bool: 是否成功 + """ + return await CacheRoot.delete(self.cache_type, key) + + async def exists(self, key: str | dict[str, Any]) -> bool: + """检查缓存是否存在 + + 参数: + key: 键或键参数 + + 返回: + bool: 是否存在 + """ + return await CacheRoot.exists(self.cache_type, key) + + async def clear(self) -> bool: + """清除此类型的所有缓存 + + 返回: + bool: 是否成功 + """ + return await CacheRoot.clear(self.cache_type) + + +@driver.on_startup +async def _(): + CacheRoot.enabled = True + logger.info("缓存系统已启用", LOG_COMMAND) + + +@driver.on_shutdown +async def _(): + await CacheRoot.close() diff 
--git a/zhenxun/services/cache/cache_containers.py b/zhenxun/services/cache/cache_containers.py new file mode 100644 index 00000000..b0efe3fb --- /dev/null +++ b/zhenxun/services/cache/cache_containers.py @@ -0,0 +1,452 @@ +from dataclasses import dataclass +import time +from typing import Any, Generic, TypeVar + +T = TypeVar("T") + + +@dataclass +class CacheData(Generic[T]): + """缓存数据类,存储数据和过期时间""" + + value: T + expire_time: float = 0 # 0表示永不过期 + + +class CacheDict: + """缓存字典类,提供类似普通字典的接口,数据只存储在内存中""" + + def __init__(self, name: str, expire: int = 0): + """初始化缓存字典 + + 参数: + name: 字典名称 + expire: 过期时间(秒),默认为0表示永不过期 + """ + self.name = name.upper() + self.expire = expire + self._data: dict[str, CacheData[Any]] = {} + + def __getitem__(self, key: str) -> Any: + """获取字典项 + + 参数: + key: 字典键 + + 返回: + Any: 字典值 + """ + data = self._data.get(key) + if data is None: + return None + + # 检查是否过期 + if data.expire_time > 0 and data.expire_time < time.time(): + del self._data[key] + return None + + return data.value + + def __setitem__(self, key: str, value: Any) -> None: + """设置字典项 + + 参数: + key: 字典键 + value: 字典值 + """ + # 计算过期时间 + expire_time = 0 + if self.expire > 0: + expire_time = time.time() + self.expire + + self._data[key] = CacheData(value=value, expire_time=expire_time) + + def __delitem__(self, key: str) -> None: + """删除字典项 + + 参数: + key: 字典键 + """ + if key in self._data: + del self._data[key] + + def __contains__(self, key: str) -> bool: + """检查键是否存在 + + 参数: + key: 字典键 + + 返回: + bool: 是否存在 + """ + if key not in self._data: + return False + + # 检查是否过期 + data = self._data[key] + if data.expire_time > 0 and data.expire_time < time.time(): + del self._data[key] + return False + + return True + + def get(self, key: str, default: Any = None) -> Any: + """获取字典项,如果不存在返回默认值 + + 参数: + key: 字典键 + default: 默认值 + + 返回: + Any: 字典值或默认值 + """ + value = self[key] + return default if value is None else value + + def set(self, key: str, value: Any, expire: int | None = None) -> None: 
+ """设置字典项 + + 参数: + key: 字典键 + value: 字典值 + expire: 过期时间(秒),为None时使用默认值 + """ + # 计算过期时间 + expire_time = 0 + if expire is not None and expire > 0: + expire_time = time.time() + expire + elif self.expire > 0: + expire_time = time.time() + self.expire + + self._data[key] = CacheData(value=value, expire_time=expire_time) + + def pop(self, key: str, default: Any = None) -> Any: + """删除并返回字典项 + + 参数: + key: 字典键 + default: 默认值 + + 返回: + Any: 字典值或默认值 + """ + if key not in self._data: + return default + + data = self._data.pop(key) + + # 检查是否过期 + if data.expire_time > 0 and data.expire_time < time.time(): + return default + + return data.value + + def clear(self) -> None: + """清空字典""" + self._data.clear() + + def keys(self) -> list[str]: + """获取所有键 + + 返回: + list[str]: 键列表 + """ + # 清理过期的键 + self._clean_expired() + return list(self._data.keys()) + + def values(self) -> list[Any]: + """获取所有值 + + 返回: + list[Any]: 值列表 + """ + # 清理过期的键 + self._clean_expired() + return [data.value for data in self._data.values()] + + def items(self) -> list[tuple[str, Any]]: + """获取所有键值对 + + 返回: + list[tuple[str, Any]]: 键值对列表 + """ + # 清理过期的键 + self._clean_expired() + return [(key, data.value) for key, data in self._data.items()] + + def _clean_expired(self) -> None: + """清理过期的键""" + now = time.time() + expired_keys = [ + key + for key, data in self._data.items() + if data.expire_time > 0 and data.expire_time < now + ] + for key in expired_keys: + del self._data[key] + + def __len__(self) -> int: + """获取字典长度 + + 返回: + int: 字典长度 + """ + # 清理过期的键 + self._clean_expired() + return len(self._data) + + def __str__(self) -> str: + """字符串表示 + + 返回: + str: 字符串表示 + """ + # 清理过期的键 + self._clean_expired() + return f"CacheDict({self.name}, {len(self._data)} items)" + + +class CacheList: + """缓存列表类,提供类似普通列表的接口,数据只存储在内存中""" + + def __init__(self, name: str, expire: int = 0): + """初始化缓存列表 + + 参数: + name: 列表名称 + expire: 过期时间(秒),默认为0表示永不过期 + """ + self.name = name.upper() + self.expire = expire + self._data: 
list[CacheData[Any]] = [] + self._expire_time = 0 + + # 如果设置了过期时间,计算整个列表的过期时间 + if self.expire > 0: + self._expire_time = time.time() + self.expire + + def __getitem__(self, index: int) -> Any: + """获取列表项 + + 参数: + index: 列表索引 + + 返回: + Any: 列表值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + raise IndexError(f"列表索引 {index} 超出范围") + + if 0 <= index < len(self._data): + return self._data[index].value + raise IndexError(f"列表索引 {index} 超出范围") + + def __setitem__(self, index: int, value: Any) -> None: + """设置列表项 + + 参数: + index: 列表索引 + value: 列表值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + + # 确保索引有效 + while len(self._data) <= index: + self._data.append(CacheData(value=None)) + self._data[index] = CacheData(value=value) + + # 更新过期时间 + self._update_expire_time() + + def __delitem__(self, index: int) -> None: + """删除列表项 + + 参数: + index: 列表索引 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + raise IndexError(f"列表索引 {index} 超出范围") + + if 0 <= index < len(self._data): + del self._data[index] + # 更新过期时间 + self._update_expire_time() + else: + raise IndexError(f"列表索引 {index} 超出范围") + + def __len__(self) -> int: + """获取列表长度 + + 返回: + int: 列表长度 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + return len(self._data) + + def append(self, value: Any) -> None: + """添加列表项 + + 参数: + value: 列表值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + + self._data.append(CacheData(value=value)) + + # 更新过期时间 + self._update_expire_time() + + def extend(self, values: list[Any]) -> None: + """扩展列表 + + 参数: + values: 要添加的值列表 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + + self._data.extend([CacheData(value=v) for v in values]) + + # 更新过期时间 + self._update_expire_time() + + def insert(self, index: int, value: Any) -> None: + """插入列表项 + + 参数: + index: 插入位置 + value: 列表值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + + self._data.insert(index, CacheData(value=value)) + + # 更新过期时间 + 
self._update_expire_time() + + def pop(self, index: int = -1) -> Any: + """删除并返回列表项 + + 参数: + index: 列表索引,默认为最后一项 + + 返回: + Any: 列表值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + raise IndexError("从空列表中弹出") + + if not self._data: + raise IndexError("从空列表中弹出") + + item = self._data.pop(index) + + # 更新过期时间 + self._update_expire_time() + + return item.value + + def remove(self, value: Any) -> None: + """删除第一个匹配的列表项 + + 参数: + value: 要删除的值 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + raise ValueError(f"{value} 不在列表中") + + # 查找匹配的项 + for i, item in enumerate(self._data): + if item.value == value: + del self._data[i] + # 更新过期时间 + self._update_expire_time() + return + + raise ValueError(f"{value} 不在列表中") + + def clear(self) -> None: + """清空列表""" + self._data.clear() + # 重置过期时间 + self._update_expire_time() + + def index(self, value: Any, start: int = 0, end: int | None = None) -> int: + """查找值的索引 + + 参数: + value: 要查找的值 + start: 起始索引 + end: 结束索引 + + 返回: + int: 索引位置 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + raise ValueError(f"{value} 不在列表中") + + end = end if end is not None else len(self._data) + + for i in range(start, min(end, len(self._data))): + if self._data[i].value == value: + return i + + raise ValueError(f"{value} 不在列表中") + + def count(self, value: Any) -> int: + """计算值出现的次数 + + 参数: + value: 要计数的值 + + 返回: + int: 出现次数 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + return 0 + + return sum(1 for item in self._data if item.value == value) + + def _is_expired(self) -> bool: + """检查整个列表是否过期""" + return self._expire_time > 0 and self._expire_time < time.time() + + def _update_expire_time(self) -> None: + """更新过期时间""" + if self.expire > 0: + self._expire_time = time.time() + self.expire + else: + self._expire_time = 0 + + def __str__(self) -> str: + """字符串表示 + + 返回: + str: 字符串表示 + """ + # 检查整个列表是否过期 + if self._is_expired(): + self.clear() + return f"CacheList({self.name}, {len(self._data)} items)" diff 
--git a/zhenxun/services/cache/config.py b/zhenxun/services/cache/config.py new file mode 100644 index 00000000..b974787b --- /dev/null +++ b/zhenxun/services/cache/config.py @@ -0,0 +1,35 @@ +""" +缓存系统配置 +""" + +# 日志标识 +LOG_COMMAND = "CacheRoot" + +# 默认缓存过期时间(秒) +DEFAULT_EXPIRE = 600 + +# 缓存键前缀 +CACHE_KEY_PREFIX = "ZHENXUN" + +# 缓存键分隔符 +CACHE_KEY_SEPARATOR = ":" + +# 复合键分隔符(用于分隔tuple类型的cache_key_field) +COMPOSITE_KEY_SEPARATOR = "_" + + +# 缓存模式 +class CacheMode: + # 内存缓存 - 使用内存存储缓存数据 + MEMORY = "MEMORY" + # Redis缓存 - 使用Redis服务器存储缓存数据 + REDIS = "REDIS" + # 不使用缓存 - 将使用ttl=0的内存缓存,相当于直接从数据库获取数据 + NONE = "NONE" + + +SPECIAL_KEY_FORMATS = { + "LEVEL": "{user_id}" + COMPOSITE_KEY_SEPARATOR + "{group_id}", + "BAN": "{user_id}" + COMPOSITE_KEY_SEPARATOR + "{group_id}", + "GROUPS": "{group_id}" + COMPOSITE_KEY_SEPARATOR + "{channel_id}", +} diff --git a/zhenxun/services/data_access.py b/zhenxun/services/data_access.py new file mode 100644 index 00000000..2dc7623f --- /dev/null +++ b/zhenxun/services/data_access.py @@ -0,0 +1,653 @@ +from typing import Any, ClassVar, Generic, TypeVar, cast + +from zhenxun.services.cache import Cache, CacheRoot, cache_config +from zhenxun.services.cache.config import COMPOSITE_KEY_SEPARATOR, CacheMode +from zhenxun.services.db_context import Model, with_db_timeout +from zhenxun.services.log import logger + +T = TypeVar("T", bound=Model) + + +class DataAccess(Generic[T]): + """数据访问层,根据配置决定是否使用缓存 + + 使用示例: + ```python + from zhenxun.services import DataAccess + from zhenxun.models.plugin_info import PluginInfo + + # 创建数据访问对象 + plugin_dao = DataAccess(PluginInfo) + + # 获取单个数据 + plugin = await plugin_dao.get(module="example_module") + + # 获取所有数据 + all_plugins = await plugin_dao.all() + + # 筛选数据 + enabled_plugins = await plugin_dao.filter(status=True) + + # 创建数据 + new_plugin = await plugin_dao.create( + module="new_module", + name="新插件", + status=True + ) + ``` + """ + + # 添加缓存统计信息 + _cache_stats: ClassVar[dict] = {} + # 空结果标记 + _NULL_RESULT = 
"__NULL_RESULT_PLACEHOLDER__" + # 默认空结果缓存时间(秒)- 设置为5分钟,避免频繁查询数据库 + _NULL_RESULT_TTL = 300 + + @classmethod + def set_null_result_ttl(cls, seconds: int) -> None: + """设置空结果缓存时间 + + 参数: + seconds: 缓存时间(秒) + """ + if seconds < 0: + raise ValueError("缓存时间不能为负数") + cls._NULL_RESULT_TTL = seconds + logger.info(f"已设置DataAccess空结果缓存时间为 {seconds} 秒") + + @classmethod + def get_null_result_ttl(cls) -> int: + """获取空结果缓存时间 + + 返回: + int: 缓存时间(秒) + """ + return cls._NULL_RESULT_TTL + + def __init__( + self, model_cls: type[T], key_field: str = "id", cache_type: str | None = None + ): + """初始化数据访问对象 + + 参数: + model_cls: 模型类 + key_field: 主键字段 + """ + self.model_cls = model_cls + self.key_field = getattr(model_cls, "cache_key_field", key_field) + self.cache_type = getattr(model_cls, "cache_type", cache_type) + + if not self.cache_type: + raise ValueError("缓存类型不能为空") + self.cache = Cache(self.cache_type) + + # 初始化缓存统计 + if self.cache_type not in self._cache_stats: + self._cache_stats[self.cache_type] = { + "hits": 0, # 缓存命中次数 + "misses": 0, # 缓存未命中次数 + "null_hits": 0, # 空结果缓存命中次数 + "sets": 0, # 缓存设置次数 + "null_sets": 0, # 空结果缓存设置次数 + "deletes": 0, # 缓存删除次数 + } + + @classmethod + def get_cache_stats(cls): + """获取缓存统计信息""" + result = [] + for cache_type, stats in cls._cache_stats.items(): + hits = stats["hits"] + null_hits = stats.get("null_hits", 0) + misses = stats["misses"] + total = hits + null_hits + misses + hit_rate = ((hits + null_hits) / total * 100) if total > 0 else 0 + result.append( + { + "cache_type": cache_type, + "hits": hits, + "null_hits": null_hits, + "misses": misses, + "sets": stats["sets"], + "null_sets": stats.get("null_sets", 0), + "deletes": stats["deletes"], + "hit_rate": f"{hit_rate:.2f}%", + } + ) + return result + + @classmethod + def reset_cache_stats(cls): + """重置缓存统计信息""" + for stats in cls._cache_stats.values(): + stats["hits"] = 0 + stats["null_hits"] = 0 + stats["misses"] = 0 + stats["sets"] = 0 + stats["null_sets"] = 0 + stats["deletes"] = 0 + + def 
_build_cache_key_from_kwargs(self, **kwargs) -> str | None: + """从关键字参数构建缓存键 + + 参数: + **kwargs: 关键字参数 + + 返回: + str | None: 缓存键,如果无法构建则返回None + """ + if isinstance(self.key_field, tuple): + # 多字段主键 + key_parts = [] + key_parts.extend(str(kwargs.get(field, "")) for field in self.key_field) + return COMPOSITE_KEY_SEPARATOR.join(key_parts) if key_parts else None + elif self.key_field in kwargs: + # 单字段主键 + return str(kwargs[self.key_field]) + return None + + async def safe_get_or_none(self, *args, **kwargs) -> T | None: + """安全的获取单条数据 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + Optional[T]: 查询结果,如果不存在返回None + """ + # 如果没有缓存类型,直接从数据库获取 + if not self.cache_type or cache_config.cache_mode == CacheMode.NONE: + logger.debug(f"{self.model_cls.__name__} 直接从数据库获取数据: {kwargs}") + return await with_db_timeout( + self.model_cls.safe_get_or_none(*args, **kwargs), + operation=f"{self.model_cls.__name__}.safe_get_or_none", + ) + + # 尝试从缓存获取 + cache_key = None + try: + # 尝试构建缓存键 + cache_key = self._build_cache_key_from_kwargs(**kwargs) + + # 如果成功构建缓存键,尝试从缓存获取 + if cache_key is not None: + data = await self.cache.get(cache_key) + logger.debug( + f"{self.model_cls.__name__} self.cache.get(cache_key)" + f" 从缓存获取到的数据 {type(data)}: {data}" + ) + if data == self._NULL_RESULT: + # 空结果缓存命中 + self._cache_stats[self.cache_type]["null_hits"] += 1 + logger.debug( + f"{self.model_cls.__name__} 从缓存获取到空结果: {cache_key}" + ) + return None + elif data: + # 缓存命中 + self._cache_stats[self.cache_type]["hits"] += 1 + logger.debug( + f"{self.model_cls.__name__} 从缓存获取数据成功: {cache_key}" + ) + return cast(T, data) + else: + # 缓存未命中 + self._cache_stats[self.cache_type]["misses"] += 1 + logger.debug(f"{self.model_cls.__name__} 缓存未命中: {cache_key}") + except Exception as e: + logger.error(f"{self.model_cls.__name__} 从缓存获取数据失败: {kwargs}", e=e) + + # 如果缓存中没有,从数据库获取 + logger.debug(f"{self.model_cls.__name__} 从数据库获取数据: {kwargs}") + data = await self.model_cls.safe_get_or_none(*args, **kwargs) + + # 如果获取到数据,存入缓存 
+ if data: + try: + # 生成缓存键 + cache_key = self._build_cache_key_for_item(data) + if cache_key is not None: + # 存入缓存 + await self.cache.set(cache_key, data) + self._cache_stats[self.cache_type]["sets"] += 1 + logger.debug( + f"{self.model_cls.__name__} 数据已存入缓存: {cache_key}" + ) + except Exception as e: + logger.error( + f"{self.model_cls.__name__} 存入缓存失败,参数: {kwargs}", e=e + ) + elif cache_key is not None: + # 如果没有获取到数据,缓存空结果 + try: + # 存入空结果缓存,使用较短的过期时间 + await self.cache.set( + cache_key, self._NULL_RESULT, expire=self._NULL_RESULT_TTL + ) + self._cache_stats[self.cache_type]["null_sets"] += 1 + logger.debug( + f"{self.model_cls.__name__} 空结果已存入缓存: {cache_key}," + f" TTL={self._NULL_RESULT_TTL}秒" + ) + except Exception as e: + logger.error( + f"{self.model_cls.__name__} 存入空结果缓存失败,参数: {kwargs}", e=e + ) + + return data + + async def get_or_none(self, *args, **kwargs) -> T | None: + """获取单条数据 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + Optional[T]: 查询结果,如果不存在返回None + """ + # 如果没有缓存类型,直接从数据库获取 + if not self.cache_type or cache_config.cache_mode == CacheMode.NONE: + logger.debug(f"{self.model_cls.__name__} 直接从数据库获取数据: {kwargs}") + return await with_db_timeout( + self.model_cls.get_or_none(*args, **kwargs), + operation=f"{self.model_cls.__name__}.get_or_none", + ) + + # 尝试从缓存获取 + cache_key = None + try: + # 尝试构建缓存键 + cache_key = self._build_cache_key_from_kwargs(**kwargs) + + # 如果成功构建缓存键,尝试从缓存获取 + if cache_key is not None: + data = await self.cache.get(cache_key) + if data == self._NULL_RESULT: + # 空结果缓存命中 + self._cache_stats[self.cache_type]["null_hits"] += 1 + logger.debug( + f"{self.model_cls.__name__} 从缓存获取到空结果: {cache_key}" + ) + return None + elif data: + # 缓存命中 + self._cache_stats[self.cache_type]["hits"] += 1 + logger.debug( + f"{self.model_cls.__name__} 从缓存获取数据成功: {cache_key}" + ) + return cast(T, data) + else: + # 缓存未命中 + self._cache_stats[self.cache_type]["misses"] += 1 + logger.debug(f"{self.model_cls.__name__} 缓存未命中: {cache_key}") + except Exception 
as e: + logger.error(f"{self.model_cls.__name__} 从缓存获取数据失败: {kwargs}", e=e) + + # 如果缓存中没有,从数据库获取 + logger.debug(f"{self.model_cls.__name__} 从数据库获取数据: {kwargs}") + data = await self.model_cls.get_or_none(*args, **kwargs) + + # 如果获取到数据,存入缓存 + if data: + try: + cache_key = self._build_cache_key_for_item(data) + # 生成缓存键 + if cache_key is not None: + # 存入缓存 + await self.cache.set(cache_key, data) + self._cache_stats[self.cache_type]["sets"] += 1 + logger.debug( + f"{self.model_cls.__name__} 数据已存入缓存: {cache_key}" + ) + except Exception as e: + logger.error( + f"{self.model_cls.__name__} 存入缓存失败,参数: {kwargs}", e=e + ) + elif cache_key is not None: + # 如果没有获取到数据,缓存空结果 + try: + # 存入空结果缓存,使用较短的过期时间 + await self.cache.set( + cache_key, self._NULL_RESULT, expire=self._NULL_RESULT_TTL + ) + self._cache_stats[self.cache_type]["null_sets"] += 1 + logger.debug( + f"{self.model_cls.__name__} 空结果已存入缓存: {cache_key}," + f" TTL={self._NULL_RESULT_TTL}秒" + ) + except Exception as e: + logger.error( + f"{self.model_cls.__name__} 存入空结果缓存失败,参数: {kwargs}", e=e + ) + + return data + + async def clear_cache(self, **kwargs) -> bool: + """只清除缓存,不影响数据库数据 + + 参数: + **kwargs: 查询参数,必须包含主键字段 + + 返回: + bool: 是否成功清除缓存 + """ + # 如果没有缓存类型,直接返回True + if not self.cache_type or cache_config.cache_mode == CacheMode.NONE: + return True + + try: + # 构建缓存键 + cache_key = self._build_cache_key_from_kwargs(**kwargs) + if cache_key is None: + if isinstance(self.key_field, tuple): + # 如果是复合键,检查缺少哪些字段 + missing_fields = [ + field for field in self.key_field if field not in kwargs + ] + logger.error( + f"清除{self.model_cls.__name__}缓存失败: " + f"缺少主键字段 {', '.join(missing_fields)}" + ) + else: + logger.error( + f"清除{self.model_cls.__name__}缓存失败: " + f"缺少主键字段 {self.key_field}" + ) + return False + + # 删除缓存 + await self.cache.delete(cache_key) + self._cache_stats[self.cache_type]["deletes"] += 1 + logger.debug(f"已清除{self.model_cls.__name__}缓存: {cache_key}") + return True + except Exception as e: + 
logger.error(f"清除{self.model_cls.__name__}缓存失败", e=e) + return False + + def _build_composite_key(self, data: T) -> str | None: + """构建复合缓存键 + + 参数: + data: 数据对象 + + 返回: + str | None: 构建的缓存键,如果无法构建则返回None + """ + # 如果是元组,表示多个字段组成键 + if isinstance(self.key_field, tuple): + # 构建键参数列表 + key_parts = [] + for field in self.key_field: + value = getattr(data, field, "") + key_parts.append(value if value is not None else "") + + # 如果没有有效参数,返回None + return COMPOSITE_KEY_SEPARATOR.join(key_parts) if key_parts else None + elif hasattr(data, self.key_field): + value = getattr(data, self.key_field, None) + return str(value) if value is not None else None + + return None + + def _build_cache_key_for_item(self, item: T) -> str | None: + """为数据项构建缓存键 + + 参数: + item: 数据项 + + 返回: + str | None: 缓存键,如果无法生成则返回None + """ + # 如果没有缓存类型,返回None + if not self.cache_type: + return None + + # 获取缓存类型的配置信息 + cache_model = CacheRoot.get_model(self.cache_type) + + if not cache_model.key_format: + # 常规处理,使用主键作为缓存键 + return self._build_composite_key(item) + # 构建键参数字典 + key_parts = [] + # 从格式字符串中提取所需的字段名 + import re + + field_names = re.findall(r"{([^}]+)}", cache_model.key_format) + + # 收集所有字段值 + for field in field_names: + value = getattr(item, field, "") + key_parts.append(value if value is not None else "") + + return COMPOSITE_KEY_SEPARATOR.join(key_parts) + + async def _cache_items(self, data_list: list[T]) -> None: + """将数据列表存入缓存 + + 参数: + data_list: 数据列表 + """ + if ( + not data_list + or not self.cache_type + or cache_config.cache_mode == CacheMode.NONE + ): + return + + try: + # 遍历数据列表,将每条数据存入缓存 + cached_count = 0 + for item in data_list: + cache_key = self._build_cache_key_for_item(item) + if cache_key is not None: + await self.cache.set(cache_key, item) + cached_count += 1 + self._cache_stats[self.cache_type]["sets"] += 1 + + logger.debug( + f"{self.model_cls.__name__} 批量缓存: {cached_count}/{len(data_list)}项" + ) + except Exception as e: + logger.error(f"{self.model_cls.__name__} 批量缓存失败", 
e=e) + + async def filter(self, *args, **kwargs) -> list[T]: + """筛选数据 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + List[T]: 查询结果列表 + """ + # 从数据库获取数据 + logger.debug(f"{self.model_cls.__name__} filter: 从数据库查询, 参数: {kwargs}") + data_list = await self.model_cls.filter(*args, **kwargs) + logger.debug( + f"{self.model_cls.__name__} filter: 查询结果数量: {len(data_list)}" + ) + + # 将数据存入缓存 + await self._cache_items(data_list) + + return data_list + + async def all(self) -> list[T]: + """获取所有数据 + + 返回: + List[T]: 所有数据列表 + """ + # 直接从数据库获取 + logger.debug(f"{self.model_cls.__name__} all: 从数据库查询所有数据") + data_list = await self.model_cls.all() + logger.debug(f"{self.model_cls.__name__} all: 查询结果数量: {len(data_list)}") + + # 将数据存入缓存 + await self._cache_items(data_list) + + return data_list + + async def count(self, *args, **kwargs) -> int: + """获取数据数量 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + int: 数据数量 + """ + # 直接从数据库获取数量 + return await self.model_cls.filter(*args, **kwargs).count() + + async def exists(self, *args, **kwargs) -> bool: + """判断数据是否存在 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + bool: 是否存在 + """ + # 直接从数据库判断是否存在 + return await self.model_cls.filter(*args, **kwargs).exists() + + async def create(self, **kwargs) -> T: + """创建数据 + + 参数: + **kwargs: 创建参数 + + 返回: + T: 创建的数据 + """ + # 创建数据 + logger.debug(f"{self.model_cls.__name__} create: 创建数据, 参数: {kwargs}") + data = await self.model_cls.create(**kwargs) + + # 如果有缓存类型,将数据存入缓存 + if self.cache_type and cache_config.cache_mode != CacheMode.NONE: + try: + # 生成缓存键 + cache_key = self._build_cache_key_for_item(data) + if cache_key is not None: + # 存入缓存 + await self.cache.set(cache_key, data) + self._cache_stats[self.cache_type]["sets"] += 1 + logger.debug( + f"{self.model_cls.__name__} create: " + f"新创建的数据已存入缓存: {cache_key}" + ) + except Exception as e: + logger.error( + f"{self.model_cls.__name__} create: 存入缓存失败,参数: {kwargs}", + e=e, + ) + + return data + + async def update_or_create( + self, defaults: dict[str, 
Any] | None = None, **kwargs + ) -> tuple[T, bool]: + """更新或创建数据 + + 参数: + defaults: 默认值 + **kwargs: 查询参数 + + 返回: + tuple[T, bool]: (数据, 是否创建) + """ + # 更新或创建数据 + data, created = await self.model_cls.update_or_create( + defaults=defaults, **kwargs + ) + + # 如果有缓存类型,将数据存入缓存 + if self.cache_type and cache_config.cache_mode != CacheMode.NONE: + try: + # 生成缓存键 + cache_key = self._build_cache_key_for_item(data) + if cache_key is not None: + # 存入缓存 + await self.cache.set(cache_key, data) + self._cache_stats[self.cache_type]["sets"] += 1 + logger.debug(f"更新或创建的数据已存入缓存: {cache_key}") + except Exception as e: + logger.error(f"存入缓存失败,参数: {kwargs}", e=e) + + return data, created + + async def delete(self, *args, **kwargs) -> int: + """删除数据 + + 参数: + *args: 查询参数 + **kwargs: 查询参数 + + 返回: + int: 删除的数据数量 + """ + logger.debug(f"{self.model_cls.__name__} delete: 删除数据, 参数: {kwargs}") + + # 如果有缓存类型且有key_field参数,先尝试删除缓存 + if self.cache_type and cache_config.cache_mode != CacheMode.NONE: + try: + # 尝试构建缓存键 + cache_key = self._build_cache_key_from_kwargs(**kwargs) + + if cache_key is not None: + # 如果成功构建缓存键,直接删除缓存 + await self.cache.delete(cache_key) + self._cache_stats[self.cache_type]["deletes"] += 1 + logger.debug( + f"{self.model_cls.__name__} delete: 已删除缓存: {cache_key}" + ) + else: + # 否则需要先查询出要删除的数据,然后删除对应的缓存 + items = await self.model_cls.filter(*args, **kwargs) + logger.debug( + f"{self.model_cls.__name__} delete:" + f" 查询到 {len(items)} 条要删除的数据" + ) + for item in items: + item_cache_key = self._build_cache_key_for_item(item) + if item_cache_key is not None: + await self.cache.delete(item_cache_key) + self._cache_stats[self.cache_type]["deletes"] += 1 + if items: + logger.debug( + f"{self.model_cls.__name__} delete:" + f" 已删除 {len(items)} 条数据的缓存" + ) + except Exception as e: + logger.error(f"{self.model_cls.__name__} delete: 删除缓存失败", e=e) + + # 删除数据 + result = await self.model_cls.filter(*args, **kwargs).delete() + logger.debug( + f"{self.model_cls.__name__} delete: 已从数据库删除 
{result} 条数据" + ) + return result + + def _generate_cache_key(self, data: T) -> str: + """根据数据对象生成缓存键 + + 参数: + data: 数据对象 + + 返回: + str: 缓存键 + """ + # 使用新方法构建复合键 + if composite_key := self._build_composite_key(data): + return composite_key + + # 如果无法生成复合键,生成一个唯一键 + return f"object_{id(data)}" diff --git a/zhenxun/services/db_context.py b/zhenxun/services/db_context.py index 4543b262..ffe98415 100644 --- a/zhenxun/services/db_context.py +++ b/zhenxun/services/db_context.py @@ -1,37 +1,328 @@ -import nonebot +import asyncio +from collections.abc import Iterable +import contextlib +import time +from typing import Any, ClassVar +from typing_extensions import Self +from urllib.parse import urlparse + +from nonebot import get_driver from nonebot.utils import is_coroutine_callable from tortoise import Tortoise +from tortoise.backends.base.client import BaseDBAsyncClient from tortoise.connection import connections -from tortoise.models import Model as Model_ +from tortoise.exceptions import IntegrityError, MultipleObjectsReturned +from tortoise.models import Model as TortoiseModel +from tortoise.transactions import in_transaction from zhenxun.configs.config import BotConfig +from zhenxun.services.cache import CacheRoot +from zhenxun.services.log import logger +from zhenxun.utils.enum import DbLockType from zhenxun.utils.exception import HookPriorityException from zhenxun.utils.manager.priority_manager import PriorityLifecycle -from .log import logger +driver = get_driver() SCRIPT_METHOD = [] MODELS: list[str] = [] +# 数据库操作超时设置(秒) +DB_TIMEOUT_SECONDS = 3.0 -driver = nonebot.get_driver() +# 性能监控阈值(秒) +SLOW_QUERY_THRESHOLD = 0.5 + +LOG_COMMAND = "DbContext" -class Model(Model_): +async def with_db_timeout( + coro, timeout: float = DB_TIMEOUT_SECONDS, operation: str | None = None +): + """带超时控制的数据库操作""" + start_time = time.time() + try: + result = await asyncio.wait_for(coro, timeout=timeout) + elapsed = time.time() - start_time + if elapsed > SLOW_QUERY_THRESHOLD and 
operation: + logger.warning(f"慢查询: {operation} 耗时 {elapsed:.3f}s", LOG_COMMAND) + return result + except asyncio.TimeoutError: + if operation: + logger.error(f"数据库操作超时: {operation} (>{timeout}s)", LOG_COMMAND) + raise + + +class Model(TortoiseModel): """ - 自动添加模块 - - Args: - Model_: Model + 增强的ORM基类,解决锁嵌套问题 """ + sem_data: ClassVar[dict[str, dict[str, asyncio.Semaphore]]] = {} + _current_locks: ClassVar[dict[int, DbLockType]] = {} # 跟踪当前协程持有的锁 + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) if cls.__module__ not in MODELS: MODELS.append(cls.__module__) if func := getattr(cls, "_run_script", None): SCRIPT_METHOD.append((cls.__module__, func)) + @classmethod + def get_cache_type(cls) -> str | None: + """获取缓存类型""" + return getattr(cls, "cache_type", None) + + @classmethod + def get_cache_key_field(cls) -> str | tuple[str]: + """获取缓存键字段""" + return getattr(cls, "cache_key_field", "id") + + @classmethod + def get_cache_key(cls, instance) -> str | None: + """获取缓存键 + + 参数: + instance: 模型实例 + + 返回: + str | None: 缓存键,如果无法获取则返回None + """ + from zhenxun.services.cache.config import COMPOSITE_KEY_SEPARATOR + + key_field = cls.get_cache_key_field() + + if isinstance(key_field, tuple): + # 多字段主键 + key_parts = [] + for field in key_field: + if hasattr(instance, field): + value = getattr(instance, field, None) + key_parts.append(value if value is not None else "") + else: + # 如果缺少任何必要的字段,返回None + key_parts.append("") + + # 如果没有有效参数,返回None + return COMPOSITE_KEY_SEPARATOR.join(key_parts) if key_parts else None + elif hasattr(instance, key_field): + value = getattr(instance, key_field, None) + return str(value) if value is not None else None + + return None + + @classmethod + def get_semaphore(cls, lock_type: DbLockType): + enable_lock = getattr(cls, "enable_lock", None) + if not enable_lock or lock_type not in enable_lock: + return None + + if cls.__name__ not in cls.sem_data: + cls.sem_data[cls.__name__] = {} + if lock_type not in 
cls.sem_data[cls.__name__]: + cls.sem_data[cls.__name__][lock_type] = asyncio.Semaphore(1) + return cls.sem_data[cls.__name__][lock_type] + + @classmethod + def _require_lock(cls, lock_type: DbLockType) -> bool: + """检查是否需要真正加锁""" + task_id = id(asyncio.current_task()) + return cls._current_locks.get(task_id) != lock_type + + @classmethod + @contextlib.asynccontextmanager + async def _lock_context(cls, lock_type: DbLockType): + """带重入检查的锁上下文""" + task_id = id(asyncio.current_task()) + need_lock = cls._require_lock(lock_type) + + if need_lock and (sem := cls.get_semaphore(lock_type)): + cls._current_locks[task_id] = lock_type + async with sem: + yield + cls._current_locks.pop(task_id, None) + else: + yield + + @classmethod + async def create( + cls, using_db: BaseDBAsyncClient | None = None, **kwargs: Any + ) -> Self: + """创建数据(使用CREATE锁)""" + async with cls._lock_context(DbLockType.CREATE): + # 直接调用父类的_create方法避免触发save的锁 + result = await super().create(using_db=using_db, **kwargs) + if cache_type := cls.get_cache_type(): + await CacheRoot.invalidate_cache(cache_type, cls.get_cache_key(result)) + return result + + @classmethod + async def get_or_create( + cls, + defaults: dict | None = None, + using_db: BaseDBAsyncClient | None = None, + **kwargs: Any, + ) -> tuple[Self, bool]: + """获取或创建数据(无锁版本,依赖数据库约束)""" + result = await super().get_or_create( + defaults=defaults, using_db=using_db, **kwargs + ) + if cache_type := cls.get_cache_type(): + await CacheRoot.invalidate_cache(cache_type, cls.get_cache_key(result[0])) + return result + + @classmethod + async def update_or_create( + cls, + defaults: dict | None = None, + using_db: BaseDBAsyncClient | None = None, + **kwargs: Any, + ) -> tuple[Self, bool]: + """更新或创建数据(使用UPSERT锁)""" + async with cls._lock_context(DbLockType.UPSERT): + try: + # 先尝试更新(带行锁) + async with in_transaction(): + if obj := await cls.filter(**kwargs).select_for_update().first(): + await obj.update_from_dict(defaults or {}) + await obj.save() + 
result = (obj, False) + else: + # 创建时不重复加锁 + result = await cls.create(**kwargs, **(defaults or {})), True + + if cache_type := cls.get_cache_type(): + await CacheRoot.invalidate_cache( + cache_type, cls.get_cache_key(result[0]) + ) + return result + except IntegrityError: + # 处理极端情况下的唯一约束冲突 + obj = await cls.get(**kwargs) + return obj, False + + async def save( + self, + using_db: BaseDBAsyncClient | None = None, + update_fields: Iterable[str] | None = None, + force_create: bool = False, + force_update: bool = False, + ): + """保存数据(根据操作类型自动选择锁)""" + lock_type = ( + DbLockType.CREATE + if getattr(self, "id", None) is None + else DbLockType.UPDATE + ) + async with self._lock_context(lock_type): + await super().save( + using_db=using_db, + update_fields=update_fields, + force_create=force_create, + force_update=force_update, + ) + if cache_type := getattr(self, "cache_type", None): + await CacheRoot.invalidate_cache( + cache_type, self.__class__.get_cache_key(self) + ) + + async def delete(self, using_db: BaseDBAsyncClient | None = None): + cache_type = getattr(self, "cache_type", None) + key = self.__class__.get_cache_key(self) if cache_type else None + # 执行删除操作 + await super().delete(using_db=using_db) + + # 清除缓存 + if cache_type: + await CacheRoot.invalidate_cache(cache_type, key) + + @classmethod + async def safe_get_or_none( + cls, + *args, + using_db: BaseDBAsyncClient | None = None, + clean_duplicates: bool = True, + **kwargs: Any, + ) -> Self | None: + """安全地获取一条记录或None,处理存在多个记录时返回最新的那个 + 注意,默认会删除重复的记录,仅保留最新的 + + 参数: + *args: 查询参数 + using_db: 数据库连接 + clean_duplicates: 是否删除重复的记录,仅保留最新的 + **kwargs: 查询参数 + + 返回: + Self | None: 查询结果,如果不存在返回None + """ + try: + # 先尝试使用 get_or_none 获取单个记录 + try: + return await with_db_timeout( + cls.get_or_none(*args, using_db=using_db, **kwargs), + operation=f"{cls.__name__}.get_or_none", + ) + except MultipleObjectsReturned: + # 如果出现多个记录的情况,进行特殊处理 + logger.warning( + f"{cls.__name__} safe_get_or_none 发现多个记录: {kwargs}", + 
LOG_COMMAND, + ) + + # 查询所有匹配记录 + records = await with_db_timeout( + cls.filter(*args, **kwargs).all(), + operation=f"{cls.__name__}.filter.all", + ) + + if not records: + return None + + # 如果需要清理重复记录 + if clean_duplicates and hasattr(records[0], "id"): + # 按 id 排序 + records = sorted( + records, key=lambda x: getattr(x, "id", 0), reverse=True + ) + for record in records[1:]: + try: + await with_db_timeout( + record.delete(), + operation=f"{cls.__name__}.delete_duplicate", + ) + logger.info( + f"{cls.__name__} 删除重复记录:" + f" id={getattr(record, 'id', None)}", + LOG_COMMAND, + ) + except Exception as del_e: + logger.error(f"删除重复记录失败: {del_e}") + return records[0] + # 如果不需要清理或没有 id 字段,则返回最新的记录 + if hasattr(cls, "id"): + return await with_db_timeout( + cls.filter(*args, **kwargs).order_by("-id").first(), + operation=f"{cls.__name__}.filter.order_by.first", + ) + # 如果没有 id 字段,则返回第一个记录 + return await with_db_timeout( + cls.filter(*args, **kwargs).first(), + operation=f"{cls.__name__}.filter.first", + ) + except asyncio.TimeoutError: + logger.error( + f"数据库操作超时: {cls.__name__}.safe_get_or_none", LOG_COMMAND + ) + return None + except Exception as e: + # 其他类型的错误则继续抛出 + logger.error( + f"数据库操作异常: {cls.__name__}.safe_get_or_none, {e!s}", LOG_COMMAND + ) + raise + class DbUrlIsNode(HookPriorityException): """ @@ -49,6 +340,77 @@ class DbConnectError(Exception): pass +POSTGRESQL_CONFIG = { + "max_size": 30, # 最大连接数 + "min_size": 5, # 最小保持的连接数(可选) +} + + +MYSQL_CONFIG = { + "max_connections": 20, # 最大连接数 + "connect_timeout": 30, # 连接超时(可选) +} + +SQLITE_CONFIG = { + "journal_mode": "WAL", # 提高并发写入性能 + "timeout": 30, # 锁等待超时(可选) +} + + +def get_config(db_url: str) -> dict: + """获取数据库配置""" + parsed = urlparse(BotConfig.db_url) + + # 基础配置 + config = { + "connections": { + "default": BotConfig.db_url # 默认直接使用连接字符串 + }, + "apps": { + "models": { + "models": MODELS, + "default_connection": "default", + } + }, + "timezone": "Asia/Shanghai", + } + + # 根据数据库类型应用高级配置 + if 
parsed.scheme.startswith("postgres"): + config["connections"]["default"] = { + "engine": "tortoise.backends.asyncpg", + "credentials": { + "host": parsed.hostname, + "port": parsed.port or 5432, + "user": parsed.username, + "password": parsed.password, + "database": parsed.path[1:], + }, + **POSTGRESQL_CONFIG, + } + elif parsed.scheme == "mysql": + config["connections"]["default"] = { + "engine": "tortoise.backends.mysql", + "credentials": { + "host": parsed.hostname, + "port": parsed.port or 3306, + "user": parsed.username, + "password": parsed.password, + "database": parsed.path[1:], + }, + **MYSQL_CONFIG, + } + elif parsed.scheme == "sqlite": + config["connections"]["default"] = { + "engine": "tortoise.backends.sqlite", + "credentials": { + "file_path": parsed.path[1:] or ":memory:", + }, + **SQLITE_CONFIG, + } + return config + + @PriorityLifecycle.on_startup(priority=1) async def init(): if not BotConfig.db_url: @@ -64,9 +426,7 @@ async def init(): raise DbUrlIsNode("\n" + error.strip()) try: await Tortoise.init( - db_url=BotConfig.db_url, - modules={"models": MODELS}, - timezone="Asia/Shanghai", + config=get_config(BotConfig.db_url), ) if SCRIPT_METHOD: db = Tortoise.get_connection("default") @@ -85,13 +445,17 @@ async def init(): for sql in sql_list: logger.debug(f"执行SQL: {sql}") try: - await db.execute_query_dict(sql) + await asyncio.wait_for( + db.execute_query_dict(sql), timeout=DB_TIMEOUT_SECONDS + ) # await TestSQL.raw(sql) except Exception as e: logger.debug(f"执行SQL: {sql} 错误...", e=e) if sql_list: logger.debug("SCRIPT_METHOD方法执行完毕!") + logger.debug("开始生成数据库表结构...") await Tortoise.generate_schemas() + logger.debug("数据库表结构生成完毕!") logger.info("Database loaded successfully!") except Exception as e: raise DbConnectError(f"数据库连接错误... 
e:{e}") from e diff --git a/zhenxun/utils/_image_template.py b/zhenxun/utils/_image_template.py index c7678b2f..327f7bc2 100644 --- a/zhenxun/utils/_image_template.py +++ b/zhenxun/utils/_image_template.py @@ -469,7 +469,7 @@ class Notebook: template_name="main.html", templates={"elements": self._data}, pages={ - "viewport": {"width": 700, "height": 1000}, + "viewport": {"width": 700, "height": 10}, "base_url": f"file://{TEMPLATE_PATH}", }, wait=2, diff --git a/zhenxun/utils/common_utils.py b/zhenxun/utils/common_utils.py index cc143898..cfdabdc5 100644 --- a/zhenxun/utils/common_utils.py +++ b/zhenxun/utils/common_utils.py @@ -53,9 +53,7 @@ class CommonUtils: if await GroupConsole.is_block_task(group_id, module): """群组是否禁用被动""" return True - if g := await GroupConsole.get_or_none( - group_id=group_id, channel_id__isnull=True - ): + if g := await GroupConsole.get_group(group_id=group_id): """群组权限是否小于0""" if g.level < 0: return True diff --git a/zhenxun/utils/enum.py b/zhenxun/utils/enum.py index db527fc3..dc8d4d1c 100644 --- a/zhenxun/utils/enum.py +++ b/zhenxun/utils/enum.py @@ -44,6 +44,44 @@ class EventLogType(StrEnum): """主动退群""" +class CacheType(StrEnum): + """ + 缓存类型 + """ + + PLUGINS = "GLOBAL_ALL_PLUGINS" + """全局全部插件""" + GROUPS = "GLOBAL_ALL_GROUPS" + """全局全部群组""" + USERS = "GLOBAL_ALL_USERS" + """全部用户""" + BAN = "GLOBAL_ALL_BAN" + """全局ban列表""" + BOT = "GLOBAL_BOT" + """全局bot信息""" + LEVEL = "GLOBAL_USER_LEVEL" + """用户权限""" + LIMIT = "GLOBAL_LIMIT" + """插件限制""" + + +class DbLockType(StrEnum): + """ + 锁类型 + """ + + CREATE = "CREATE" + """创建""" + DELETE = "DELETE" + """删除""" + UPDATE = "UPDATE" + """更新""" + QUERY = "QUERY" + """查询""" + UPSERT = "UPSERT" + """创建或更新""" + + class GoldHandle(StrEnum): """ 金币处理 diff --git a/zhenxun/utils/manager/priority_manager.py b/zhenxun/utils/manager/priority_manager.py index 1c59635c..4ad45586 100644 --- a/zhenxun/utils/manager/priority_manager.py +++ b/zhenxun/utils/manager/priority_manager.py @@ -49,6 +49,9 @@ async def 
_(): try: for priority in priority_list: for func in priority_data[priority]: + logger.debug( + f"执行优先级 [{priority}] on_startup 方法: {func.__module__}" + ) if is_coroutine_callable(func): await func() else: diff --git a/zhenxun/utils/utils.py b/zhenxun/utils/utils.py index bdd28f83..44dcd672 100644 --- a/zhenxun/utils/utils.py +++ b/zhenxun/utils/utils.py @@ -1,11 +1,13 @@ from collections import defaultdict +from dataclasses import dataclass from datetime import date, datetime import os from pathlib import Path import time -from typing import Any +from typing import Any, ClassVar import httpx +from nonebot_plugin_uninfo import Uninfo import pypinyin import pytz @@ -13,43 +15,53 @@ from zhenxun.configs.config import Config from zhenxun.services.log import logger +@dataclass +class EntityIDs: + user_id: str + """用户id""" + group_id: str | None + """群组id""" + channel_id: str | None + """频道id""" + + class ResourceDirManager: """ 临时文件管理器 """ - temp_path = [] # noqa: RUF012 + temp_path: ClassVar[set[Path]] = set() @classmethod - def __tree_append(cls, path: Path): - """递归添加文件夹 - - 参数: - path: 文件夹路径 - """ + def __tree_append(cls, path: Path, deep: int = 1, current: int = 0): + """递归添加文件夹""" + if current >= deep and deep != -1: + return + path = path.resolve() # 标准化路径 for f in os.listdir(path): - file = path / f + file = (path / f).resolve() # 标准化子路径 if file.is_dir(): if file not in cls.temp_path: - cls.temp_path.append(file) - logger.debug(f"添加临时文件夹: {path}") - cls.__tree_append(file) + cls.temp_path.add(file) + logger.debug(f"添加临时文件夹: {file}") + cls.__tree_append(file, deep, current + 1) @classmethod - def add_temp_dir(cls, path: str | Path, tree: bool = False): + def add_temp_dir(cls, path: str | Path, tree: bool = False, deep: int = 1): """添加临时清理文件夹,这些文件夹会被自动清理 参数: path: 文件夹路径 tree: 是否递归添加文件夹 + deep: 深度, -1 为无限深度 """ if isinstance(path, str): path = Path(path) if path not in cls.temp_path: - cls.temp_path.append(path) + cls.temp_path.add(path) logger.debug(f"添加临时文件夹: 
{path}") if tree: - cls.__tree_append(path) + cls.__tree_append(path, deep) class CountLimiter: @@ -230,6 +242,27 @@ def is_valid_date(date_text: str, separator: str = "-") -> bool: return False +def get_entity_ids(session: Uninfo) -> EntityIDs: + """获取用户id,群组id,频道id + + 参数: + session: Uninfo + + 返回: + EntityIDs: 用户id,群组id,频道id + """ + user_id = session.user.id + group_id = None + channel_id = None + if session.group: + if session.group.parent: + group_id = session.group.parent.id + channel_id = session.group.id + else: + group_id = session.group.id + return EntityIDs(user_id=user_id, group_id=group_id, channel_id=channel_id) + + def is_number(text: str) -> bool: """是否为数字 From 46a0768a4501ce6ba9a573ed337b077d67380ec6 Mon Sep 17 00:00:00 2001 From: Rumio <32546670+webjoin111@users.noreply.github.com> Date: Mon, 14 Jul 2025 22:39:17 +0800 Subject: [PATCH 2/4] =?UTF-8?q?=E2=9C=A8=20feat(llm):=20=E6=96=B0=E5=A2=9E?= =?UTF-8?q?LLM=E6=A8=A1=E5=9E=8B=E7=AE=A1=E7=90=86=E6=8F=92=E4=BB=B6?= =?UTF-8?q?=E5=B9=B6=E5=A2=9E=E5=BC=BAAPI=E5=AF=86=E9=92=A5=E7=AE=A1?= =?UTF-8?q?=E7=90=86=20(#1972)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 🔧 新增功能: - LLM模型管理插件 (builtin_plugins/llm_manager/) • llm list - 查看可用模型列表 (图片格式) • llm info - 查看模型详细信息 (Markdown图片) • llm default - 管理全局默认模型 • llm test - 测试模型连通性 • llm keys - 查看API Key状态 (表格图片,含健康度/成功率/延迟) • llm reset-key - 重置API Key失败状态 🏗️ 架构重构: - 会话管理: AI/AIConfig 类迁移至独立的 session.py - 类型定义: TaskType 枚举移至 types/enums.py - API增强: • chat() 函数返回完整 LLMResponse,支持工具调用 • 新增 generate() 函数用于一次性响应生成 • 统一API调用核心方法 _perform_api_call,返回使用的API密钥 🚀 密钥管理增强: - 详细状态跟踪: 健康度、成功率、平均延迟、错误信息、建议操作 - 状态持久化: 启动时加载,关闭时自动保存密钥状态 - 智能冷却策略: 根据错误类型设置不同冷却时间 - 延迟监控: with_smart_retry 记录API调用延迟并更新统计 Co-authored-by: webjoin111 <455457521@qq.com> Co-authored-by: HibiKier <45528451+HibiKier@users.noreply.github.com> --- .../builtin_plugins/llm_manager/__init__.py | 171 ++++++ .../llm_manager/data_source.py | 120 ++++ 
.../builtin_plugins/llm_manager/presenters.py | 204 +++++++ zhenxun/services/__init__.py | 34 ++ zhenxun/services/llm/README.md | 5 +- zhenxun/services/llm/__init__.py | 7 +- zhenxun/services/llm/api.py | 560 ++---------------- zhenxun/services/llm/config/providers.py | 12 +- zhenxun/services/llm/core.py | 309 ++++++++-- zhenxun/services/llm/manager.py | 4 +- zhenxun/services/llm/service.py | 127 +--- zhenxun/services/llm/session.py | 532 +++++++++++++++++ zhenxun/services/llm/types/__init__.py | 9 +- zhenxun/services/llm/types/enums.py | 11 + 14 files changed, 1423 insertions(+), 682 deletions(-) create mode 100644 zhenxun/builtin_plugins/llm_manager/__init__.py create mode 100644 zhenxun/builtin_plugins/llm_manager/data_source.py create mode 100644 zhenxun/builtin_plugins/llm_manager/presenters.py create mode 100644 zhenxun/services/llm/session.py diff --git a/zhenxun/builtin_plugins/llm_manager/__init__.py b/zhenxun/builtin_plugins/llm_manager/__init__.py new file mode 100644 index 00000000..de0e0caf --- /dev/null +++ b/zhenxun/builtin_plugins/llm_manager/__init__.py @@ -0,0 +1,171 @@ +from nonebot.permission import SUPERUSER +from nonebot.plugin import PluginMetadata +from nonebot_plugin_alconna import ( + Alconna, + Args, + Arparma, + Match, + Option, + Query, + Subcommand, + on_alconna, + store_true, +) + +from zhenxun.configs.utils import PluginExtraData +from zhenxun.services.log import logger +from zhenxun.utils.enum import PluginType +from zhenxun.utils.message import MessageUtils + +from .data_source import DataSource +from .presenters import Presenters + +__plugin_meta__ = PluginMetadata( + name="LLM模型管理", + description="查看和管理大语言模型服务。", + usage=""" + LLM模型管理 (SUPERUSER) + + llm list [--all] + - 查看可用模型列表。 + - --all: 显示包括不可用在内的所有模型。 + + llm info + - 查看指定模型的详细信息和能力。 + + llm default [Provider/ModelName] + - 查看或设置全局默认模型。 + - 不带参数: 查看当前默认模型。 + - 带参数: 设置新的默认模型。 + - 例子: llm default Gemini/gemini-2.0-flash + + llm test + - 测试指定模型的连通性和API Key有效性。 + + llm keys + - 
查看指定提供商的所有API Key状态。 + + llm reset-key [--key ] + - 重置提供商的所有或指定API Key的失败状态。 + """, + extra=PluginExtraData( + author="HibiKier", + version="1.0.0", + plugin_type=PluginType.SUPERUSER, + ).to_dict(), +) + +llm_cmd = on_alconna( + Alconna( + "llm", + Subcommand("list", alias=["ls"], help_text="查看模型列表"), + Subcommand("info", Args["model_name", str], help_text="查看模型详情"), + Subcommand("default", Args["model_name?", str], help_text="查看或设置默认模型"), + Subcommand( + "test", Args["model_name", str], alias=["ping"], help_text="测试模型连通性" + ), + Subcommand("keys", Args["provider_name", str], help_text="查看API密钥状态"), + Subcommand( + "reset-key", + Args["provider_name", str], + Option("--key", Args["api_key", str], help_text="指定要重置的API Key"), + help_text="重置API Key状态", + ), + Option("--all", action=store_true, help_text="显示所有条目"), + ), + permission=SUPERUSER, + priority=5, + block=True, +) + + +@llm_cmd.assign("list") +async def handle_list(arp: Arparma, show_all: Query[bool] = Query("all")): + """处理 'llm list' 命令""" + logger.info("获取LLM模型列表", command="LLM Manage", session=arp.header_result) + models = await DataSource.get_model_list(show_all=show_all.result) + + image = await Presenters.format_model_list_as_image(models, show_all.result) + await llm_cmd.finish(MessageUtils.build_message(image)) + + +@llm_cmd.assign("info") +async def handle_info(arp: Arparma, model_name: Match[str]): + """处理 'llm info' 命令""" + logger.info( + f"获取模型详情: {model_name.result}", + command="LLM Manage", + session=arp.header_result, + ) + details = await DataSource.get_model_details(model_name.result) + if not details: + await llm_cmd.finish(f"未找到模型: {model_name.result}") + + image_bytes = await Presenters.format_model_details_as_markdown_image(details) + await llm_cmd.finish(MessageUtils.build_message(image_bytes)) + + +@llm_cmd.assign("default") +async def handle_default(arp: Arparma, model_name: Match[str]): + """处理 'llm default' 命令""" + if model_name.available: + logger.info( + f"设置默认模型为: 
{model_name.result}", + command="LLM Manage", + session=arp.header_result, + ) + success, message = await DataSource.set_default_model(model_name.result) + await llm_cmd.finish(message) + else: + logger.info("查看默认模型", command="LLM Manage", session=arp.header_result) + current_default = await DataSource.get_default_model() + await llm_cmd.finish(f"当前全局默认模型为: {current_default or '未设置'}") + + +@llm_cmd.assign("test") +async def handle_test(arp: Arparma, model_name: Match[str]): + """处理 'llm test' 命令""" + logger.info( + f"测试模型连通性: {model_name.result}", + command="LLM Manage", + session=arp.header_result, + ) + await llm_cmd.send(f"正在测试模型 '{model_name.result}',请稍候...") + + success, message = await DataSource.test_model_connectivity(model_name.result) + await llm_cmd.finish(message) + + +@llm_cmd.assign("keys") +async def handle_keys(arp: Arparma, provider_name: Match[str]): + """处理 'llm keys' 命令""" + logger.info( + f"查看提供商API Key状态: {provider_name.result}", + command="LLM Manage", + session=arp.header_result, + ) + sorted_stats = await DataSource.get_key_status(provider_name.result) + if not sorted_stats: + await llm_cmd.finish( + f"未找到提供商 '{provider_name.result}' 或其没有配置API Keys。" + ) + + image = await Presenters.format_key_status_as_image( + provider_name.result, sorted_stats + ) + await llm_cmd.finish(MessageUtils.build_message(image)) + + +@llm_cmd.assign("reset-key") +async def handle_reset_key( + arp: Arparma, provider_name: Match[str], api_key: Match[str] +): + """处理 'llm reset-key' 命令""" + key_to_reset = api_key.result if api_key.available else None + log_msg = f"重置 {provider_name.result} 的 " + ( + "指定API Key" if key_to_reset else "所有API Keys" + ) + logger.info(log_msg, command="LLM Manage", session=arp.header_result) + + success, message = await DataSource.reset_key(provider_name.result, key_to_reset) + await llm_cmd.finish(message) diff --git a/zhenxun/builtin_plugins/llm_manager/data_source.py b/zhenxun/builtin_plugins/llm_manager/data_source.py new file mode 
100644 index 00000000..0fc19e99 --- /dev/null +++ b/zhenxun/builtin_plugins/llm_manager/data_source.py @@ -0,0 +1,120 @@ +import time +from typing import Any + +from zhenxun.services.llm import ( + LLMException, + get_global_default_model_name, + get_model_instance, + list_available_models, + set_global_default_model_name, +) +from zhenxun.services.llm.core import KeyStatus +from zhenxun.services.llm.manager import ( + reset_key_status, +) + + +class DataSource: + """LLM管理插件的数据源和业务逻辑""" + + @staticmethod + async def get_model_list(show_all: bool = False) -> list[dict[str, Any]]: + """获取模型列表""" + models = list_available_models() + if show_all: + return models + return [m for m in models if m.get("is_available", True)] + + @staticmethod + async def get_model_details(model_name_str: str) -> dict[str, Any] | None: + """获取指定模型的详细信息""" + try: + model = await get_model_instance(model_name_str) + return { + "provider_config": model.provider_config, + "model_detail": model.model_detail, + "capabilities": model.capabilities, + } + except LLMException: + return None + + @staticmethod + async def get_default_model() -> str | None: + """获取全局默认模型""" + return get_global_default_model_name() + + @staticmethod + async def set_default_model(model_name_str: str) -> tuple[bool, str]: + """设置全局默认模型""" + success = set_global_default_model_name(model_name_str) + if success: + return True, f"✅ 成功将默认模型设置为: {model_name_str}" + else: + return False, f"❌ 设置失败,模型 '{model_name_str}' 不存在或无效。" + + @staticmethod + async def test_model_connectivity(model_name_str: str) -> tuple[bool, str]: + """测试模型连通性""" + start_time = time.monotonic() + try: + async with await get_model_instance(model_name_str) as model: + await model.generate_text("你好") + end_time = time.monotonic() + latency = (end_time - start_time) * 1000 + return ( + True, + f"✅ 模型 '{model_name_str}' 连接成功!\n响应延迟: {latency:.2f} ms", + ) + except LLMException as e: + return ( + False, + f"❌ 模型 '{model_name_str}' 连接测试失败:\n" + 
f"{e.user_friendly_message}\n错误码: {e.code.name}", + ) + except Exception as e: + return False, f"❌ 测试时发生未知错误: {e!s}" + + @staticmethod + async def get_key_status(provider_name: str) -> list[dict[str, Any]] | None: + """获取并排序指定提供商的API Key状态""" + from zhenxun.services.llm.manager import get_key_usage_stats + + all_stats = await get_key_usage_stats() + provider_stats = all_stats.get(provider_name) + + if not provider_stats or not provider_stats.get("key_stats"): + return None + + key_stats_dict = provider_stats["key_stats"] + + stats_list = [ + {"key_id": key_id, **stats} for key_id, stats in key_stats_dict.items() + ] + + def sort_key(item: dict[str, Any]): + status_priority = item.get("status_enum", KeyStatus.UNUSED).value + return ( + status_priority, + 100 - item.get("success_rate", 100.0), + -item.get("total_calls", 0), + ) + + sorted_stats_list = sorted(stats_list, key=sort_key) + + return sorted_stats_list + + @staticmethod + async def reset_key(provider_name: str, api_key: str | None) -> tuple[bool, str]: + """重置API Key状态""" + success = await reset_key_status(provider_name, api_key) + if success: + if api_key: + if len(api_key) > 8: + target = f"API Key '{api_key[:4]}...{api_key[-4:]}'" + else: + target = f"API Key '{api_key}'" + else: + target = "所有API Keys" + return True, f"✅ 成功重置提供商 '{provider_name}' 的 {target} 的状态。" + else: + return False, "❌ 重置失败,请检查提供商名称或API Key是否正确。" diff --git a/zhenxun/builtin_plugins/llm_manager/presenters.py b/zhenxun/builtin_plugins/llm_manager/presenters.py new file mode 100644 index 00000000..d745aaf7 --- /dev/null +++ b/zhenxun/builtin_plugins/llm_manager/presenters.py @@ -0,0 +1,204 @@ +from typing import Any + +from zhenxun.services.llm.core import KeyStatus +from zhenxun.services.llm.types import ModelModality +from zhenxun.utils._build_image import BuildImage +from zhenxun.utils._image_template import ImageTemplate, Markdown, RowStyle + + +def _format_seconds(seconds: int) -> str: + """将秒数格式化为 'Xm Ys' 或 'Xh Ym' 的形式""" + if 
seconds <= 0: + return "0s" + if seconds < 60: + return f"{seconds}s" + + minutes, seconds = divmod(seconds, 60) + if minutes < 60: + return f"{minutes}m {seconds}s" + + hours, minutes = divmod(minutes, 60) + return f"{hours}h {minutes}m" + + +class Presenters: + """格式化LLM管理插件的输出 (图片格式)""" + + @staticmethod + async def format_model_list_as_image( + models: list[dict[str, Any]], show_all: bool + ) -> BuildImage: + """将模型列表格式化为表格图片""" + title = "📋 LLM模型列表" + (" (所有已配置模型)" if show_all else " (仅可用)") + + if not models: + return await BuildImage.build_text_image( + f"{title}\n\n当前没有配置任何LLM模型。" + ) + + column_name = ["提供商", "模型名称", "API类型", "状态"] + data_list = [] + for model in models: + status_text = "✅ 可用" if model.get("is_available", True) else "❌ 不可用" + embed_tag = " (Embed)" if model.get("is_embedding_model", False) else "" + data_list.append( + [ + model.get("provider_name", "N/A"), + f"{model.get('model_name', 'N/A')}{embed_tag}", + model.get("api_type", "N/A"), + status_text, + ] + ) + + return await ImageTemplate.table_page( + head_text=title, + tip_text="使用 `llm info ` 查看详情", + column_name=column_name, + data_list=data_list, + ) + + @staticmethod + async def format_model_details_as_markdown_image(details: dict[str, Any]) -> bytes: + """将模型详情格式化为Markdown图片""" + provider = details["provider_config"] + model = details["model_detail"] + caps = details["capabilities"] + + cap_list = [] + if ModelModality.IMAGE in caps.input_modalities: + cap_list.append("视觉") + if ModelModality.VIDEO in caps.input_modalities: + cap_list.append("视频") + if ModelModality.AUDIO in caps.input_modalities: + cap_list.append("音频") + if caps.supports_tool_calling: + cap_list.append("工具调用") + if caps.is_embedding_model: + cap_list.append("文本嵌入") + + md = Markdown() + md.head(f"🔎 模型详情: {provider.name}/{model.model_name}", level=1) + md.text("---") + md.head("提供商信息", level=2) + md.list( + [ + f"**名称**: {provider.name}", + f"**API 类型**: {provider.api_type}", + f"**API Base**: {provider.api_base 
or '默认'}", + ] + ) + md.head("模型详情", level=2) + + temp_value = model.temperature or provider.temperature or "未设置" + token_value = model.max_tokens or provider.max_tokens or "未设置" + + md.list( + [ + f"**名称**: {model.model_name}", + f"**默认温度**: {temp_value}", + f"**最大Token**: {token_value}", + f"**核心能力**: {', '.join(cap_list) or '纯文本'}", + ] + ) + + return await md.build() + + @staticmethod + async def format_key_status_as_image( + provider_name: str, sorted_stats: list[dict[str, Any]] + ) -> BuildImage: + """将已排序的、详细的API Key状态格式化为表格图片""" + title = f"🔑 '{provider_name}' API Key 状态" + + if not sorted_stats: + return await BuildImage.build_text_image( + f"{title}\n\n该提供商没有配置API Keys。" + ) + + def _status_row_style(column: str, text: str) -> RowStyle: + style = RowStyle() + if column == "状态": + if "✅ 健康" in text: + style.font_color = "#67C23A" + elif "⚠️ 告警" in text: + style.font_color = "#E6A23C" + elif "❌ 错误" in text or "🚫" in text: + style.font_color = "#F56C6C" + elif "❄️ 冷却中" in text: + style.font_color = "#409EFF" + elif column == "成功率": + try: + if text != "N/A": + rate = float(text.replace("%", "")) + if rate < 80: + style.font_color = "#F56C6C" + elif rate < 95: + style.font_color = "#E6A23C" + except (ValueError, TypeError): + pass + return style + + column_name = [ + "Key (部分)", + "状态", + "总调用", + "成功率", + "平均延迟(s)", + "上次错误", + "建议操作", + ] + data_list = [] + + for key_info in sorted_stats: + status_enum: KeyStatus = key_info["status_enum"] + + if status_enum == KeyStatus.COOLDOWN: + cooldown_seconds = int(key_info["cooldown_seconds_left"]) + formatted_time = _format_seconds(cooldown_seconds) + status_text = f"❄️ 冷却中({formatted_time})" + else: + status_text = { + KeyStatus.DISABLED: "🚫 永久禁用", + KeyStatus.ERROR: "❌ 错误", + KeyStatus.WARNING: "⚠️ 告警", + KeyStatus.HEALTHY: "✅ 健康", + KeyStatus.UNUSED: "⚪️ 未使用", + }.get(status_enum, "❔ 未知") + + total_calls = key_info["total_calls"] + total_calls_text = ( + f"{key_info['success_count']}/{total_calls}" + if 
total_calls > 0 + else "0/0" + ) + + success_rate = key_info["success_rate"] + success_rate_text = f"{success_rate:.1f}%" if total_calls > 0 else "N/A" + + avg_latency = key_info["avg_latency"] + avg_latency_text = f"{avg_latency / 1000:.2f}" if avg_latency > 0 else "N/A" + + last_error = key_info.get("last_error") or "-" + if len(last_error) > 25: + last_error = last_error[:22] + "..." + + data_list.append( + [ + key_info["key_id"], + status_text, + total_calls_text, + success_rate_text, + avg_latency_text, + last_error, + key_info["suggested_action"], + ] + ) + + return await ImageTemplate.table_page( + head_text=title, + tip_text="使用 `llm reset-key ` 重置Key状态", + column_name=column_name, + data_list=data_list, + text_style=_status_row_style, + column_space=15, + ) diff --git a/zhenxun/services/__init__.py b/zhenxun/services/__init__.py index 6af390a8..4c820b87 100644 --- a/zhenxun/services/__init__.py +++ b/zhenxun/services/__init__.py @@ -21,11 +21,28 @@ require("nonebot_plugin_waiter") from .db_context import Model, disconnect from .llm import ( AI, + AIConfig, + CommonOverrides, LLMContentPart, LLMException, + LLMGenerationConfig, LLMMessage, + analyze, + analyze_multimodal, + chat, + clear_model_cache, + code, + create_multimodal_message, + embed, + generate, + get_cache_stats, get_model_instance, list_available_models, + list_embedding_models, + pipeline_chat, + search, + search_multimodal, + set_global_default_model_name, tool_registry, ) from .log import logger @@ -34,16 +51,33 @@ from .scheduler import scheduler_manager __all__ = [ "AI", + "AIConfig", + "CommonOverrides", "LLMContentPart", "LLMException", + "LLMGenerationConfig", "LLMMessage", "Model", "PluginInit", "PluginInitManager", + "analyze", + "analyze_multimodal", + "chat", + "clear_model_cache", + "code", + "create_multimodal_message", "disconnect", + "embed", + "generate", + "get_cache_stats", "get_model_instance", "list_available_models", + "list_embedding_models", "logger", + "pipeline_chat", 
"scheduler_manager", + "search", + "search_multimodal", + "set_global_default_model_name", "tool_registry", ] diff --git a/zhenxun/services/llm/README.md b/zhenxun/services/llm/README.md index 93394fdf..c827f80a 100644 --- a/zhenxun/services/llm/README.md +++ b/zhenxun/services/llm/README.md @@ -198,7 +198,7 @@ print(search_result['text']) 当你需要进行有上下文的、连续的对话时,`AI` 类是你的最佳选择。 ```python -from zhenxun.services.llm.api import AI, AIConfig +from zhenxun.services.llm import AI, AIConfig # 初始化一个AI会话,可以传入自定义配置 ai_config = AIConfig(model="GLM/glm-4-flash", temperature=0.7) @@ -395,7 +395,7 @@ async def my_tool_factory(config: MyToolConfig): 在 `analyze` 或 `generate_response` 中使用 `use_tools` 参数。框架会自动处理整个调用流程。 ```python -from zhenxun.services.llm.api import analyze +from zhenxun.services.llm import analyze from nonebot_plugin_alconna.uniseg import UniMessage response = await analyze( @@ -442,7 +442,6 @@ from zhenxun.services.llm.manager import ( get_key_usage_stats, reset_key_status ) -from zhenxun.services.llm import clear_model_cache, get_cache_stats # 列出所有在config.yaml中配置的可用模型 models = list_available_models() diff --git a/zhenxun/services/llm/__init__.py b/zhenxun/services/llm/__init__.py index 62a0003f..31e82d4d 100644 --- a/zhenxun/services/llm/__init__.py +++ b/zhenxun/services/llm/__init__.py @@ -5,14 +5,12 @@ LLM 服务模块 - 公共 API 入口 """ from .api import ( - AI, - AIConfig, - TaskType, analyze, analyze_multimodal, chat, code, embed, + generate, pipeline_chat, search, search_multimodal, @@ -35,6 +33,7 @@ from .manager import ( list_model_identifiers, set_global_default_model_name, ) +from .session import AI, AIConfig from .tools import tool_registry from .types import ( EmbeddingTaskType, @@ -49,6 +48,7 @@ from .types import ( ModelInfo, ModelProvider, ResponseFormat, + TaskType, ToolCategory, ToolMetadata, UsageInfo, @@ -84,6 +84,7 @@ __all__ = [ "code", "create_multimodal_message", "embed", + "generate", "get_cache_stats", "get_global_default_model_name", 
"get_model_instance", diff --git a/zhenxun/services/llm/api.py b/zhenxun/services/llm/api.py index d9606f80..5059bbe9 100644 --- a/zhenxun/services/llm/api.py +++ b/zhenxun/services/llm/api.py @@ -1,10 +1,7 @@ """ -LLM 服务的高级 API 接口 +LLM 服务的高级 API 接口 - 便捷函数入口 """ -import copy -from dataclasses import dataclass -from enum import Enum from pathlib import Path from typing import Any @@ -12,10 +9,8 @@ from nonebot_plugin_alconna.uniseg import UniMessage from zhenxun.services.log import logger -from .config import CommonOverrides, LLMGenerationConfig -from .config.providers import get_ai_config -from .manager import get_global_default_model_name, get_model_instance -from .tools import tool_registry +from .manager import get_model_instance +from .session import AI from .types import ( EmbeddingTaskType, LLMContentPart, @@ -29,514 +24,31 @@ from .types import ( from .utils import create_multimodal_message, unimsg_to_llm_parts -class TaskType(Enum): - """任务类型枚举""" - - CHAT = "chat" - CODE = "code" - SEARCH = "search" - ANALYSIS = "analysis" - GENERATION = "generation" - MULTIMODAL = "multimodal" - - -@dataclass -class AIConfig: - """AI配置类 - 简化版本""" - - model: ModelName = None - default_embedding_model: ModelName = None - temperature: float | None = None - max_tokens: int | None = None - enable_cache: bool = False - enable_code: bool = False - enable_search: bool = False - timeout: int | None = None - - enable_gemini_json_mode: bool = False - enable_gemini_thinking: bool = False - enable_gemini_safe_mode: bool = False - enable_gemini_multimodal: bool = False - enable_gemini_grounding: bool = False - default_preserve_media_in_history: bool = False - - def __post_init__(self): - """初始化后从配置中读取默认值""" - ai_config = get_ai_config() - if self.model is None: - self.model = ai_config.get("default_model_name") - if self.timeout is None: - self.timeout = ai_config.get("timeout", 180) - - -class AI: - """统一的AI服务类 - 平衡设计版本 - - 提供三层API: - 1. 简单方法:ai.chat(), ai.code(), ai.search() - 2. 
标准方法:ai.analyze() 支持复杂参数 - 3. 高级方法:通过get_model_instance()直接访问 - """ - - def __init__( - self, config: AIConfig | None = None, history: list[LLMMessage] | None = None - ): - """ - 初始化AI服务 - - 参数: - config: AI 配置. - history: 可选的初始对话历史. - """ - self.config = config or AIConfig() - self.history = history or [] - - def clear_history(self): - """清空当前会话的历史记录""" - self.history = [] - logger.info("AI session history cleared.") - - def _sanitize_message_for_history(self, message: LLMMessage) -> LLMMessage: - """ - 净化用于存入历史记录的消息。 - 将非文本的多模态内容部分替换为文本占位符,以避免重复处理。 - """ - if not isinstance(message.content, list): - return message - - sanitized_message = copy.deepcopy(message) - content_list = sanitized_message.content - if not isinstance(content_list, list): - return sanitized_message - - new_content_parts: list[LLMContentPart] = [] - has_multimodal_content = False - - for part in content_list: - if isinstance(part, LLMContentPart) and part.type == "text": - new_content_parts.append(part) - else: - has_multimodal_content = True - - if has_multimodal_content: - placeholder = "[用户发送了媒体文件,内容已在首次分析时处理]" - text_part_found = False - for part in new_content_parts: - if part.type == "text": - part.text = f"{placeholder} {part.text or ''}".strip() - text_part_found = True - break - if not text_part_found: - new_content_parts.insert(0, LLMContentPart.text_part(placeholder)) - - sanitized_message.content = new_content_parts - return sanitized_message - - async def chat( - self, - message: str | LLMMessage | list[LLMContentPart], - *, - model: ModelName = None, - preserve_media_in_history: bool | None = None, - **kwargs: Any, - ) -> str: - """ - 进行一次聊天对话。 - 此方法会自动使用和更新会话内的历史记录。 - - 参数: - message: 用户输入的消息。 - model: 本次对话要使用的模型。 - preserve_media_in_history: 是否在历史记录中保留原始多模态信息。 - - True: 保留,用于深度多轮媒体分析。 - - False: 不保留,替换为占位符,提高效率。 - - None (默认): 使用AI实例配置的默认值。 - **kwargs: 传递给模型的其他参数。 - - 返回: - str: 模型的文本响应。 - """ - current_message: LLMMessage - if isinstance(message, str): - current_message = 
LLMMessage.user(message) - elif isinstance(message, list) and all( - isinstance(part, LLMContentPart) for part in message - ): - current_message = LLMMessage.user(message) - elif isinstance(message, LLMMessage): - current_message = message - else: - raise LLMException( - f"AI.chat 不支持的消息类型: {type(message)}. " - "请使用 str, LLMMessage, 或 list[LLMContentPart]. " - "对于更复杂的多模态输入或文件路径,请使用 AI.analyze().", - code=LLMErrorCode.API_REQUEST_FAILED, - ) - - final_messages = [*self.history, current_message] - - response = await self._execute_generation( - final_messages, model, "聊天失败", kwargs - ) - - should_preserve = ( - preserve_media_in_history - if preserve_media_in_history is not None - else self.config.default_preserve_media_in_history - ) - - if should_preserve: - logger.debug("深度分析模式:在历史记录中保留原始多模态消息。") - self.history.append(current_message) - else: - logger.debug("高效模式:净化历史记录中的多模态消息。") - sanitized_user_message = self._sanitize_message_for_history(current_message) - self.history.append(sanitized_user_message) - - self.history.append(LLMMessage.assistant_text_response(response.text)) - - return response.text - - async def code( - self, - prompt: str, - *, - model: ModelName = None, - timeout: int | None = None, - **kwargs: Any, - ) -> dict[str, Any]: - """ - 代码执行 - - 参数: - prompt: 代码执行的提示词。 - model: 要使用的模型名称。 - timeout: 代码执行超时时间(秒)。 - **kwargs: 传递给模型的其他参数。 - - 返回: - dict[str, Any]: 包含执行结果的字典,包含text、code_executions和success字段。 - """ - resolved_model = model or self.config.model or "Gemini/gemini-2.0-flash" - - config = CommonOverrides.gemini_code_execution() - if timeout: - config.custom_params = config.custom_params or {} - config.custom_params["code_execution_timeout"] = timeout - - messages = [LLMMessage.user(prompt)] - - response = await self._execute_generation( - messages, resolved_model, "代码执行失败", kwargs, base_config=config - ) - - return { - "text": response.text, - "code_executions": response.code_executions or [], - "success": True, - } - - async def search( - 
self, - query: str | UniMessage, - *, - model: ModelName = None, - instruction: str = "", - **kwargs: Any, - ) -> dict[str, Any]: - """ - 信息搜索 - 支持多模态输入 - - 参数: - query: 搜索查询内容,支持文本或多模态消息。 - model: 要使用的模型名称。 - instruction: 搜索指令。 - **kwargs: 传递给模型的其他参数。 - - 返回: - dict[str, Any]: 包含搜索结果的字典,包含text、sources、queries和success字段 - """ - resolved_model = model or self.config.model or "Gemini/gemini-2.0-flash" - config = CommonOverrides.gemini_grounding() - - if isinstance(query, str): - messages = [LLMMessage.user(query)] - elif isinstance(query, UniMessage): - content_parts = await unimsg_to_llm_parts(query) - - final_messages: list[LLMMessage] = [] - if instruction: - final_messages.append(LLMMessage.system(instruction)) - - if not content_parts: - if instruction: - final_messages.append(LLMMessage.user(instruction)) - else: - raise LLMException( - "搜索内容为空或无法处理。", code=LLMErrorCode.API_REQUEST_FAILED - ) - else: - final_messages.append(LLMMessage.user(content_parts)) - - messages = final_messages - else: - raise LLMException( - f"不支持的搜索输入类型: {type(query)}. 
请使用 str 或 UniMessage.", - code=LLMErrorCode.API_REQUEST_FAILED, - ) - - response = await self._execute_generation( - messages, resolved_model, "信息搜索失败", kwargs, base_config=config - ) - - result = { - "text": response.text, - "sources": [], - "queries": [], - "success": True, - } - - if response.grounding_metadata: - result["sources"] = response.grounding_metadata.grounding_attributions or [] - result["queries"] = response.grounding_metadata.web_search_queries or [] - - return result - - async def analyze( - self, - message: UniMessage | None, - *, - instruction: str = "", - model: ModelName = None, - use_tools: list[str] | None = None, - tool_config: dict[str, Any] | None = None, - activated_tools: list[LLMTool] | None = None, - history: list[LLMMessage] | None = None, - **kwargs: Any, - ) -> LLMResponse: - """ - 内容分析 - 接收 UniMessage 物件进行多模态分析和工具呼叫。 - - 参数: - message: 要分析的消息内容(支持多模态)。 - instruction: 分析指令。 - model: 要使用的模型名称。 - use_tools: 要使用的工具名称列表。 - tool_config: 工具配置。 - activated_tools: 已激活的工具列表。 - history: 对话历史记录。 - **kwargs: 传递给模型的其他参数。 - - 返回: - LLMResponse: 模型的完整响应结果。 - """ - content_parts = await unimsg_to_llm_parts(message or UniMessage()) - - final_messages: list[LLMMessage] = [] - if history: - final_messages.extend(history) - - if instruction: - if not any(msg.role == "system" for msg in final_messages): - final_messages.insert(0, LLMMessage.system(instruction)) - - if not content_parts: - if instruction and not history: - final_messages.append(LLMMessage.user(instruction)) - elif not history: - raise LLMException( - "分析内容为空或无法处理。", code=LLMErrorCode.API_REQUEST_FAILED - ) - else: - final_messages.append(LLMMessage.user(content_parts)) - - llm_tools: list[LLMTool] | None = activated_tools - if not llm_tools and use_tools: - try: - llm_tools = tool_registry.get_tools(use_tools) - logger.debug(f"已从注册表加载工具定义: {use_tools}") - except ValueError as e: - raise LLMException( - f"加载工具定义失败: {e}", - code=LLMErrorCode.CONFIGURATION_ERROR, - cause=e, - ) - - 
tool_choice = None - if tool_config: - mode = tool_config.get("mode", "auto") - if mode in ["auto", "any", "none"]: - tool_choice = mode - - response = await self._execute_generation( - final_messages, - model, - "内容分析失败", - kwargs, - llm_tools=llm_tools, - tool_choice=tool_choice, - ) - - return response - - async def _execute_generation( - self, - messages: list[LLMMessage], - model_name: ModelName, - error_message: str, - config_overrides: dict[str, Any], - llm_tools: list[LLMTool] | None = None, - tool_choice: str | dict[str, Any] | None = None, - base_config: LLMGenerationConfig | None = None, - ) -> LLMResponse: - """通用的生成执行方法,封装模型获取和单次API调用""" - try: - resolved_model_name = self._resolve_model_name( - model_name or self.config.model - ) - final_config_dict = self._merge_config( - config_overrides, base_config=base_config - ) - - async with await get_model_instance( - resolved_model_name, override_config=final_config_dict - ) as model_instance: - return await model_instance.generate_response( - messages, - tools=llm_tools, - tool_choice=tool_choice, - ) - except LLMException: - raise - except Exception as e: - logger.error(f"{error_message}: {e}", e=e) - raise LLMException(f"{error_message}: {e}", cause=e) - - def _resolve_model_name(self, model_name: ModelName) -> str: - """解析模型名称""" - if model_name: - return model_name - - default_model = get_global_default_model_name() - if default_model: - return default_model - - raise LLMException( - "未指定模型名称且未设置全局默认模型", - code=LLMErrorCode.MODEL_NOT_FOUND, - ) - - def _merge_config( - self, - user_config: dict[str, Any], - base_config: LLMGenerationConfig | None = None, - ) -> dict[str, Any]: - """合并配置""" - final_config = {} - if base_config: - final_config.update(base_config.to_dict()) - - if self.config.temperature is not None: - final_config["temperature"] = self.config.temperature - if self.config.max_tokens is not None: - final_config["max_tokens"] = self.config.max_tokens - - if self.config.enable_cache: - 
final_config["enable_caching"] = True - if self.config.enable_code: - final_config["enable_code_execution"] = True - if self.config.enable_search: - final_config["enable_grounding"] = True - - if self.config.enable_gemini_json_mode: - final_config["response_mime_type"] = "application/json" - if self.config.enable_gemini_thinking: - final_config["thinking_budget"] = 0.8 - if self.config.enable_gemini_safe_mode: - final_config["safety_settings"] = ( - CommonOverrides.gemini_safe().safety_settings - ) - if self.config.enable_gemini_multimodal: - final_config.update(CommonOverrides.gemini_multimodal().to_dict()) - if self.config.enable_gemini_grounding: - final_config["enable_grounding"] = True - - final_config.update(user_config) - - return final_config - - async def embed( - self, - texts: list[str] | str, - *, - model: ModelName = None, - task_type: EmbeddingTaskType | str = EmbeddingTaskType.RETRIEVAL_DOCUMENT, - **kwargs: Any, - ) -> list[list[float]]: - """ - 生成文本嵌入向量 - - 参数: - texts: 要生成嵌入向量的文本或文本列表。 - model: 要使用的嵌入模型名称。 - task_type: 嵌入任务类型。 - **kwargs: 传递给模型的其他参数。 - - 返回: - list[list[float]]: 文本的嵌入向量列表。 - """ - if isinstance(texts, str): - texts = [texts] - if not texts: - return [] - - try: - resolved_model_str = ( - model or self.config.default_embedding_model or self.config.model - ) - if not resolved_model_str: - raise LLMException( - "使用 embed 功能时必须指定嵌入模型名称," - "或在 AIConfig 中配置 default_embedding_model。", - code=LLMErrorCode.MODEL_NOT_FOUND, - ) - resolved_model_str = self._resolve_model_name(resolved_model_str) - - async with await get_model_instance( - resolved_model_str, - override_config=None, - ) as embedding_model_instance: - return await embedding_model_instance.generate_embeddings( - texts, task_type=task_type, **kwargs - ) - except LLMException: - raise - except Exception as e: - logger.error(f"文本嵌入失败: {e}", e=e) - raise LLMException( - f"文本嵌入失败: {e}", code=LLMErrorCode.EMBEDDING_FAILED, cause=e - ) - - async def chat( message: str | LLMMessage | 
list[LLMContentPart], *, model: ModelName = None, + tools: list[LLMTool] | None = None, + tool_choice: str | dict[str, Any] | None = None, **kwargs: Any, -) -> str: +) -> LLMResponse: """ 聊天对话便捷函数 参数: message: 用户输入的消息。 model: 要使用的模型名称。 + tools: 本次对话可用的工具列表。 + tool_choice: 强制模型使用的工具。 **kwargs: 传递给模型的其他参数。 返回: - str: 模型的文本响应。 + LLMResponse: 模型的完整响应,可能包含文本或工具调用请求。 """ ai = AI() - return await ai.chat(message, model=model, **kwargs) + return await ai.chat( + message, model=model, tools=tools, tool_choice=tool_choice, **kwargs + ) async def code( @@ -730,12 +242,14 @@ async def pipeline_chat( raise ValueError("模型链`model_chain`不能为空。") current_content: str | list[LLMContentPart] - if isinstance(message, str): + if isinstance(message, UniMessage): + current_content = await unimsg_to_llm_parts(message) + elif isinstance(message, str): current_content = message elif isinstance(message, list): current_content = message else: - current_content = await unimsg_to_llm_parts(message) + raise TypeError(f"不支持的消息类型: {type(message)}") final_response: LLMResponse | None = None @@ -787,3 +301,45 @@ async def pipeline_chat( ) return final_response + + +async def generate( + messages: list[LLMMessage], + *, + model: ModelName = None, + tools: list[LLMTool] | None = None, + tool_choice: str | dict[str, Any] | None = None, + **kwargs: Any, +) -> LLMResponse: + """ + 根据完整的消息列表(包括系统指令)生成一次性响应。 + 这是一个便捷的函数,不使用或修改任何会话历史。 + + 参数: + messages: 用于生成响应的完整消息列表。 + model: 要使用的模型名称。 + tools: 可用的工具列表。 + tool_choice: 工具选择策略。 + **kwargs: 传递给模型的其他参数。 + + 返回: + LLMResponse: 模型的完整响应对象。 + """ + try: + ai_instance = AI() + resolved_model_name = ai_instance._resolve_model_name(model) + final_config_dict = ai_instance._merge_config(kwargs) + + async with await get_model_instance( + resolved_model_name, override_config=final_config_dict + ) as model_instance: + return await model_instance.generate_response( + messages, + tools=tools, + tool_choice=tool_choice, + ) + except LLMException: + raise + except Exception 
as e: + logger.error(f"生成响应失败: {e}", e=e) + raise LLMException(f"生成响应失败: {e}", cause=e) diff --git a/zhenxun/services/llm/config/providers.py b/zhenxun/services/llm/config/providers.py index a39e32c9..96d30cdf 100644 --- a/zhenxun/services/llm/config/providers.py +++ b/zhenxun/services/llm/config/providers.py @@ -17,6 +17,7 @@ from zhenxun.configs.utils import parse_as from zhenxun.services.log import logger from zhenxun.utils.manager.priority_manager import PriorityLifecycle +from ..core import key_store from ..types.models import ModelDetail, ProviderConfig @@ -502,12 +503,13 @@ def set_default_model(provider_model_name: str | None) -> bool: @PriorityLifecycle.on_startup(priority=10) async def _init_llm_config_on_startup(): """ - 在服务启动时主动调用一次 get_llm_config, - 以触发必要的初始化操作,例如创建默认的 mcp_tools.json 文件。 + 在服务启动时主动调用一次 get_llm_config 和 key_store.initialize, + 以触发必要的初始化操作。 """ - logger.info("正在初始化 LLM 配置并检查 MCP 工具文件...") + logger.info("正在初始化 LLM 配置并加载密钥状态...") try: get_llm_config() - logger.info("LLM 配置初始化完成。") + await key_store.initialize() + logger.info("LLM 配置和密钥状态初始化完成。") except Exception as e: - logger.error(f"LLM 配置初始化时发生错误: {e}", e=e) + logger.error(f"LLM 配置或密钥状态初始化时发生错误: {e}", e=e) diff --git a/zhenxun/services/llm/core.py b/zhenxun/services/llm/core.py index 56591701..6e5b5960 100644 --- a/zhenxun/services/llm/core.py +++ b/zhenxun/services/llm/core.py @@ -5,17 +5,27 @@ LLM 核心基础设施模块 """ import asyncio +from dataclasses import asdict, dataclass +from enum import IntEnum +import json +import os +import time from typing import Any +import aiofiles import httpx +import nonebot from pydantic import BaseModel +from zhenxun.configs.path_config import DATA_PATH from zhenxun.services.log import logger from zhenxun.utils.user_agent import get_user_agent from .types import ProviderConfig from .types.exceptions import LLMErrorCode, LLMException +driver = nonebot.get_driver() + class HttpClientConfig(BaseModel): """HTTP客户端配置""" @@ -194,6 +204,82 @@ async def 
create_llm_http_client( return LLMHttpClient(config) +class KeyStatus(IntEnum): + """用于排序和展示的密钥状态枚举""" + + DISABLED = 0 + ERROR = 1 + COOLDOWN = 2 + WARNING = 3 + HEALTHY = 4 + UNUSED = 5 + + +@dataclass +class KeyStats: + """单个API Key的详细状态和统计信息""" + + cooldown_until: float = 0.0 + success_count: int = 0 + failure_count: int = 0 + total_latency: float = 0.0 + last_error_info: str | None = None + + @property + def is_available(self) -> bool: + """检查Key当前是否可用""" + return time.time() >= self.cooldown_until + + @property + def avg_latency(self) -> float: + """计算平均延迟""" + return ( + self.total_latency / self.success_count if self.success_count > 0 else 0.0 + ) + + @property + def success_rate(self) -> float: + """计算成功率""" + total = self.success_count + self.failure_count + return self.success_count / total * 100 if total > 0 else 100.0 + + @property + def status(self) -> KeyStatus: + """根据当前统计数据动态计算状态""" + now = time.time() + cooldown_left = max(0, self.cooldown_until - now) + + if cooldown_left > 31536000 - 60: + return KeyStatus.DISABLED + if cooldown_left > 0: + return KeyStatus.COOLDOWN + + total_calls = self.success_count + self.failure_count + if total_calls == 0: + return KeyStatus.UNUSED + + if self.success_rate < 80: + return KeyStatus.ERROR + + if total_calls >= 5 and self.avg_latency > 15000: + return KeyStatus.WARNING + + return KeyStatus.HEALTHY + + @property + def suggested_action(self) -> str: + """根据状态给出建议操作""" + status_actions = { + KeyStatus.DISABLED: "更换Key", + KeyStatus.ERROR: "检查网络/重置", + KeyStatus.COOLDOWN: "等待/重置", + KeyStatus.WARNING: "观察", + KeyStatus.HEALTHY: "-", + KeyStatus.UNUSED: "-", + } + return status_actions.get(self.status, "未知") + + class RetryConfig: """重试配置""" @@ -236,26 +322,38 @@ async def with_smart_retry( last_exception: Exception | None = None failed_keys: set[str] = set() + model_instance = next((arg for arg in args if hasattr(arg, "api_keys")), None) + all_provider_keys = model_instance.api_keys if model_instance else [] + 
for attempt in range(config.max_retries + 1): try: if config.key_rotation and "failed_keys" in func.__code__.co_varnames: kwargs["failed_keys"] = failed_keys - return await func(*args, **kwargs) + start_time = time.monotonic() + result = await func(*args, **kwargs) + latency = (time.monotonic() - start_time) * 1000 + + if key_store and isinstance(result, tuple) and len(result) == 2: + final_result, api_key_used = result + if api_key_used: + await key_store.record_success(api_key_used, latency) + return final_result + else: + return result except LLMException as e: last_exception = e + api_key_in_use = e.details.get("api_key") - if e.code in [ - LLMErrorCode.API_KEY_INVALID, - LLMErrorCode.API_QUOTA_EXCEEDED, - ]: - if hasattr(e, "details") and e.details and "api_key" in e.details: - failed_keys.add(e.details["api_key"]) - if key_store and provider_name: - await key_store.record_failure( - e.details["api_key"], e.details.get("status_code") - ) + if api_key_in_use: + failed_keys.add(api_key_in_use) + if key_store and provider_name and len(all_provider_keys) > 1: + status_code = e.details.get("status_code") + error_message = f"({e.code.name}) {e.message}" + await key_store.record_failure( + api_key_in_use, status_code, error_message + ) should_retry = _should_retry_llm_error(e, attempt, config.max_retries) if not should_retry: @@ -267,7 +365,7 @@ async def with_smart_retry( if config.exponential_backoff: wait_time *= 2**attempt logger.warning( - f"请求失败,{wait_time}秒后重试 (第{attempt + 1}次): {e}" + f"请求失败,{wait_time:.2f}秒后重试 (第{attempt + 1}次): {e}" ) await asyncio.sleep(wait_time) else: @@ -325,14 +423,66 @@ def _should_retry_llm_error( class KeyStatusStore: - """API Key 状态管理存储 - 优化版本,支持轮询和负载均衡""" + """API Key 状态管理存储 - 支持持久化""" def __init__(self): - self._key_status: dict[str, bool] = {} - self._key_usage_count: dict[str, int] = {} - self._key_last_used: dict[str, float] = {} + self._key_stats: dict[str, KeyStats] = {} self._provider_key_index: dict[str, int] = {} 
self._lock = asyncio.Lock() + self._file_path = DATA_PATH / "llm" / "key_status.json" + + async def initialize(self): + """从文件异步加载密钥状态,在应用启动时调用""" + async with self._lock: + if not self._file_path.exists(): + logger.info("未找到密钥状态文件,将使用内存状态启动。") + return + + try: + logger.info(f"正在从 {self._file_path} 加载密钥状态...") + async with aiofiles.open(self._file_path, encoding="utf-8") as f: + content = await f.read() + if not content: + logger.warning("密钥状态文件为空。") + return + data = json.loads(content) + + for key, stats_dict in data.items(): + self._key_stats[key] = KeyStats(**stats_dict) + + logger.info(f"成功加载 {len(self._key_stats)} 个密钥的状态。") + + except json.JSONDecodeError: + logger.error(f"密钥状态文件 {self._file_path} 格式错误,无法解析。") + except Exception as e: + logger.error(f"加载密钥状态文件时发生错误: {e}", e=e) + + async def _save_to_file_internal(self): + """ + [内部方法] 将当前密钥状态安全地写入JSON文件。 + 假定调用方已持有锁。 + """ + data_to_save = {key: asdict(stats) for key, stats in self._key_stats.items()} + + try: + self._file_path.parent.mkdir(parents=True, exist_ok=True) + temp_path = self._file_path.with_suffix(".json.tmp") + + async with aiofiles.open(temp_path, "w", encoding="utf-8") as f: + await f.write(json.dumps(data_to_save, ensure_ascii=False, indent=2)) + + if self._file_path.exists(): + self._file_path.unlink() + os.rename(temp_path, self._file_path) + logger.debug("密钥状态已成功持久化到文件。") + except Exception as e: + logger.error(f"保存密钥状态到文件失败: {e}", e=e) + + async def shutdown(self): + """在应用关闭时安全地保存状态""" + async with self._lock: + await self._save_to_file_internal() + logger.info("KeyStatusStore 已在关闭前保存状态。") async def get_next_available_key( self, @@ -355,88 +505,122 @@ class KeyStatusStore: return None exclude_keys = exclude_keys or set() - available_keys = [ - key - for key in api_keys - if key not in exclude_keys and self._key_status.get(key, True) - ] - - if not available_keys: - return api_keys[0] if api_keys else None async with self._lock: + for key in api_keys: + if key not in self._key_stats: + 
self._key_stats[key] = KeyStats() + + available_keys = [ + key + for key in api_keys + if key not in exclude_keys and self._key_stats[key].is_available + ] + + if not available_keys: + return api_keys[0] + current_index = self._provider_key_index.get(provider_name, 0) - selected_key = available_keys[current_index % len(available_keys)] + self._provider_key_index[provider_name] = current_index + 1 - self._provider_key_index[provider_name] = (current_index + 1) % len( - available_keys + total_usage = ( + self._key_stats[selected_key].success_count + + self._key_stats[selected_key].failure_count ) - - import time - - self._key_usage_count[selected_key] = ( - self._key_usage_count.get(selected_key, 0) + 1 - ) - self._key_last_used[selected_key] = time.time() - logger.debug( f"轮询选择API密钥: {self._get_key_id(selected_key)} " - f"(使用次数: {self._key_usage_count[selected_key]})" + f"(使用次数: {total_usage})" ) - return selected_key - async def record_success(self, api_key: str): - """记录成功使用""" + async def record_success(self, api_key: str, latency: float): + """记录成功使用,并持久化""" async with self._lock: - self._key_status[api_key] = True - logger.debug(f"记录API密钥成功使用: {self._get_key_id(api_key)}") + stats = self._key_stats.setdefault(api_key, KeyStats()) + stats.cooldown_until = 0.0 + stats.success_count += 1 + stats.total_latency += latency + stats.last_error_info = None + await self._save_to_file_internal() + logger.debug( + f"记录API密钥成功使用: {self._get_key_id(api_key)}, 延迟: {latency:.2f}ms" + ) - async def record_failure(self, api_key: str, status_code: int | None): + async def record_failure( + self, api_key: str, status_code: int | None, error_message: str + ): """ - 记录失败使用 + 记录失败使用,并设置冷却时间 参数: api_key: API密钥。 status_code: HTTP状态码。 + error_message: 错误信息。 """ key_id = self._get_key_id(api_key) + now = time.time() + cooldown_duration = 300 + + if status_code in [401, 403, 404]: + cooldown_duration = 31536000 + log_level = "error" + log_message = f"API密钥认证/权限/路径错误,将永久禁用: {key_id}" + 
elif status_code == 429: + cooldown_duration = 60 + log_level = "warning" + log_message = f"API密钥被限流,冷却60秒: {key_id}" + else: + log_level = "warning" + log_message = f"API密钥遇到临时性错误,冷却{cooldown_duration}秒: {key_id}" + async with self._lock: - if status_code in [401, 403]: - self._key_status[api_key] = False - logger.warning( - f"API密钥认证失败,标记为不可用: {key_id} (状态码: {status_code})" - ) - else: - logger.debug(f"记录API密钥失败使用: {key_id} (状态码: {status_code})") + stats = self._key_stats.setdefault(api_key, KeyStats()) + stats.cooldown_until = now + cooldown_duration + stats.failure_count += 1 + stats.last_error_info = error_message[:256] + await self._save_to_file_internal() + + getattr(logger, log_level)(log_message) async def reset_key_status(self, api_key: str): - """重置密钥状态(用于恢复机制)""" + """重置密钥状态,并持久化""" async with self._lock: - self._key_status[api_key] = True + stats = self._key_stats.setdefault(api_key, KeyStats()) + stats.cooldown_until = 0.0 + stats.last_error_info = None + await self._save_to_file_internal() logger.info(f"重置API密钥状态: {self._get_key_id(api_key)}") async def get_key_stats(self, api_keys: list[str]) -> dict[str, dict]: """ - 获取密钥使用统计 + 获取密钥使用统计,并计算出用于展示的派生数据。 参数: api_keys: API密钥列表。 返回: - dict[str, dict]: 密钥统计信息字典。 + dict[str, dict]: 包含丰富状态和统计信息的密钥字典。 """ - stats = {} + stats_dict = {} + now = time.time() async with self._lock: for key in api_keys: key_id = self._get_key_id(key) - stats[key_id] = { - "available": self._key_status.get(key, True), - "usage_count": self._key_usage_count.get(key, 0), - "last_used": self._key_last_used.get(key, 0), + stats = self._key_stats.get(key, KeyStats()) + + stats_dict[key_id] = { + "status_enum": stats.status, + "cooldown_seconds_left": max(0, stats.cooldown_until - now), + "total_calls": stats.success_count + stats.failure_count, + "success_count": stats.success_count, + "failure_count": stats.failure_count, + "success_rate": stats.success_rate, + "avg_latency": stats.avg_latency, + "last_error": stats.last_error_info, 
+ "suggested_action": stats.suggested_action, } - return stats + return stats_dict def _get_key_id(self, api_key: str) -> str: """获取API密钥的标识符(用于日志)""" @@ -446,3 +630,8 @@ class KeyStatusStore: key_store = KeyStatusStore() + + +@driver.on_shutdown +async def _shutdown_key_store(): + await key_store.shutdown() diff --git a/zhenxun/services/llm/manager.py b/zhenxun/services/llm/manager.py index f0e9c560..6bad91f1 100644 --- a/zhenxun/services/llm/manager.py +++ b/zhenxun/services/llm/manager.py @@ -137,8 +137,8 @@ def get_configured_providers() -> list[ProviderConfig]: valid_providers.append(item) else: logger.warning( - f"配置文件中第 {i + 1} 项未能正确解析为 ProviderConfig 对象," - f"已跳过。实际类型: {type(item)}" + f"配置文件中第 {i + 1} 项未能正确解析为 ProviderConfig 对象,已跳过。" + f"实际类型: {type(item)}" ) return valid_providers diff --git a/zhenxun/services/llm/service.py b/zhenxun/services/llm/service.py index 587b15cc..76d846ba 100644 --- a/zhenxun/services/llm/service.py +++ b/zhenxun/services/llm/service.py @@ -46,17 +46,7 @@ class LLMModelBase(ABC): history: list[dict[str, str]] | None = None, **kwargs: Any, ) -> str: - """ - 生成文本 - - 参数: - prompt: 输入提示词。 - history: 对话历史记录。 - **kwargs: 其他参数。 - - 返回: - str: 生成的文本。 - """ + """生成文本""" pass @abstractmethod @@ -68,19 +58,7 @@ class LLMModelBase(ABC): tool_choice: str | dict[str, Any] | None = None, **kwargs: Any, ) -> LLMResponse: - """ - 生成高级响应 - - 参数: - messages: 消息列表。 - config: 生成配置。 - tools: 工具列表。 - tool_choice: 工具选择策略。 - **kwargs: 其他参数。 - - 返回: - LLMResponse: 模型响应。 - """ + """生成高级响应""" pass @abstractmethod @@ -90,17 +68,7 @@ class LLMModelBase(ABC): task_type: EmbeddingTaskType | str = EmbeddingTaskType.RETRIEVAL_DOCUMENT, **kwargs: Any, ) -> list[list[float]]: - """ - 生成文本嵌入向量 - - 参数: - texts: 文本列表。 - task_type: 嵌入任务类型。 - **kwargs: 其他参数。 - - 返回: - list[list[float]]: 嵌入向量列表。 - """ + """生成文本嵌入向量""" pass @@ -208,28 +176,8 @@ class LLMModel(LLMModelBase): http_client: "LLMHttpClient", failed_keys: set[str] | None = None, log_context: str = "API", - ) 
-> Any: - """ - 执行API调用的通用核心方法。 - - 该方法封装了以下通用逻辑: - 1. 选择API密钥。 - 2. 准备和记录请求。 - 3. 发送HTTP POST请求。 - 4. 处理HTTP错误和API特定错误。 - 5. 记录密钥使用状态。 - 6. 解析成功的响应。 - - 参数: - prepare_request_func: 准备请求的函数。 - parse_response_func: 解析响应的函数。 - http_client: HTTP客户端。 - failed_keys: 失败的密钥集合。 - log_context: 日志上下文。 - - 返回: - Any: 解析后的响应数据。 - """ + ) -> tuple[Any, str]: + """执行API调用的通用核心方法""" api_key = await self._select_api_key(failed_keys) try: @@ -267,7 +215,9 @@ class LLMModel(LLMModelBase): ) logger.debug(f"💥 完整错误响应: {error_text}") - await self.key_store.record_failure(api_key, http_response.status_code) + await self.key_store.record_failure( + api_key, http_response.status_code, error_text + ) if http_response.status_code in [401, 403]: error_code = LLMErrorCode.API_KEY_INVALID @@ -298,7 +248,7 @@ class LLMModel(LLMModelBase): except Exception as e: logger.error(f"解析 {log_context} 响应失败: {e}", e=e) - await self.key_store.record_failure(api_key, None) + await self.key_store.record_failure(api_key, None, str(e)) if isinstance(e, LLMException): raise else: @@ -308,17 +258,15 @@ class LLMModel(LLMModelBase): cause=e, ) - await self.key_store.record_success(api_key) - logger.debug(f"✅ API密钥使用成功: {masked_key}") logger.info(f"🎯 LLM响应解析完成 [{log_context}]") - return parsed_data + return parsed_data, api_key except LLMException: raise except Exception as e: error_log_msg = f"生成 {log_context.lower()} 时发生未预期错误: {e}" logger.error(error_log_msg, e=e) - await self.key_store.record_failure(api_key, None) + await self.key_store.record_failure(api_key, None, str(e)) raise LLMException( error_log_msg, code=LLMErrorCode.GENERATION_FAILED @@ -349,13 +297,14 @@ class LLMModel(LLMModelBase): adapter.validate_embedding_response(response_json) return adapter.parse_embedding_response(response_json) - return await self._perform_api_call( + parsed_data, api_key_used = await self._perform_api_call( prepare_request_func=prepare_request, parse_response_func=parse_response, http_client=http_client, 
failed_keys=failed_keys, log_context="Embedding", ) + return parsed_data async def _execute_with_smart_retry( self, @@ -394,8 +343,8 @@ class LLMModel(LLMModelBase): tool_choice: str | dict[str, Any] | None, http_client: LLMHttpClient, failed_keys: set[str] | None = None, - ) -> LLMResponse: - """执行单次请求 - 供重试机制调用,直接返回 LLMResponse""" + ) -> tuple[LLMResponse, str]: + """执行单次请求 - 供重试机制调用,直接返回 LLMResponse 和使用的 key""" async def prepare_request(api_key: str) -> RequestData: return await adapter.prepare_advanced_request( @@ -441,19 +390,17 @@ class LLMModel(LLMModelBase): cache_info=response_data.cache_info, ) - return await self._perform_api_call( + parsed_data, api_key_used = await self._perform_api_call( prepare_request_func=prepare_request, parse_response_func=parse_response, http_client=http_client, failed_keys=failed_keys, log_context="Generation", ) + return parsed_data, api_key_used async def close(self): - """ - 标记模型实例的当前使用周期结束。 - 共享的 HTTP 客户端由 LLMHttpClientManager 管理,不由 LLMModel 关闭。 - """ + """标记模型实例的当前使用周期结束""" if self._is_closed: return self._is_closed = True @@ -487,17 +434,7 @@ class LLMModel(LLMModelBase): history: list[dict[str, str]] | None = None, **kwargs: Any, ) -> str: - """ - 生成文本 - 通过 generate_response 实现 - - 参数: - prompt: 输入提示词。 - history: 对话历史记录。 - **kwargs: 其他参数。 - - 返回: - str: 生成的文本。 - """ + """生成文本""" self._check_not_closed() messages: list[LLMMessage] = [] @@ -538,19 +475,7 @@ class LLMModel(LLMModelBase): tool_choice: str | dict[str, Any] | None = None, **kwargs: Any, ) -> LLMResponse: - """ - 生成高级响应 - - 参数: - messages: 消息列表。 - config: 生成配置。 - tools: 工具列表。 - tool_choice: 工具选择策略。 - **kwargs: 其他参数。 - - 返回: - LLMResponse: 模型响应。 - """ + """生成高级响应""" self._check_not_closed() from .adapters import get_adapter_for_api_type @@ -619,17 +544,7 @@ class LLMModel(LLMModelBase): task_type: EmbeddingTaskType | str = EmbeddingTaskType.RETRIEVAL_DOCUMENT, **kwargs: Any, ) -> list[list[float]]: - """ - 生成文本嵌入向量 - - 参数: - texts: 文本列表。 - task_type: 嵌入任务类型。 - 
**kwargs: 其他参数。 - - 返回: - list[list[float]]: 嵌入向量列表。 - """ + """生成文本嵌入向量""" self._check_not_closed() if not texts: return [] diff --git a/zhenxun/services/llm/session.py b/zhenxun/services/llm/session.py new file mode 100644 index 00000000..ed23eeca --- /dev/null +++ b/zhenxun/services/llm/session.py @@ -0,0 +1,532 @@ +""" +LLM 服务 - 会话客户端 + +提供一个有状态的、面向会话的 LLM 客户端,用于进行多轮对话和复杂交互。 +""" + +import copy +from dataclasses import dataclass +from typing import Any + +from nonebot_plugin_alconna.uniseg import UniMessage + +from zhenxun.services.log import logger + +from .config import CommonOverrides, LLMGenerationConfig +from .config.providers import get_ai_config +from .manager import get_global_default_model_name, get_model_instance +from .tools import tool_registry +from .types import ( + EmbeddingTaskType, + LLMContentPart, + LLMErrorCode, + LLMException, + LLMMessage, + LLMResponse, + LLMTool, + ModelName, +) +from .utils import unimsg_to_llm_parts + + +@dataclass +class AIConfig: + """AI配置类 - 简化版本""" + + model: ModelName = None + default_embedding_model: ModelName = None + temperature: float | None = None + max_tokens: int | None = None + enable_cache: bool = False + enable_code: bool = False + enable_search: bool = False + timeout: int | None = None + + enable_gemini_json_mode: bool = False + enable_gemini_thinking: bool = False + enable_gemini_safe_mode: bool = False + enable_gemini_multimodal: bool = False + enable_gemini_grounding: bool = False + default_preserve_media_in_history: bool = False + + def __post_init__(self): + """初始化后从配置中读取默认值""" + ai_config = get_ai_config() + if self.model is None: + self.model = ai_config.get("default_model_name") + if self.timeout is None: + self.timeout = ai_config.get("timeout", 180) + + +class AI: + """统一的AI服务类 - 平衡设计版本 + + 提供三层API: + 1. 简单方法:ai.chat(), ai.code(), ai.search() + 2. 标准方法:ai.analyze() 支持复杂参数 + 3. 
高级方法:通过get_model_instance()直接访问 + """ + + def __init__( + self, config: AIConfig | None = None, history: list[LLMMessage] | None = None + ): + """ + 初始化AI服务 + + 参数: + config: AI 配置. + history: 可选的初始对话历史. + """ + self.config = config or AIConfig() + self.history = history or [] + + def clear_history(self): + """清空当前会话的历史记录""" + self.history = [] + logger.info("AI session history cleared.") + + def _sanitize_message_for_history(self, message: LLMMessage) -> LLMMessage: + """ + 净化用于存入历史记录的消息。 + 将非文本的多模态内容部分替换为文本占位符,以避免重复处理。 + """ + if not isinstance(message.content, list): + return message + + sanitized_message = copy.deepcopy(message) + content_list = sanitized_message.content + if not isinstance(content_list, list): + return sanitized_message + + new_content_parts: list[LLMContentPart] = [] + has_multimodal_content = False + + for part in content_list: + if isinstance(part, LLMContentPart) and part.type == "text": + new_content_parts.append(part) + else: + has_multimodal_content = True + + if has_multimodal_content: + placeholder = "[用户发送了媒体文件,内容已在首次分析时处理]" + text_part_found = False + for part in new_content_parts: + if part.type == "text": + part.text = f"{placeholder} {part.text or ''}".strip() + text_part_found = True + break + if not text_part_found: + new_content_parts.insert(0, LLMContentPart.text_part(placeholder)) + + sanitized_message.content = new_content_parts + return sanitized_message + + async def chat( + self, + message: str | LLMMessage | list[LLMContentPart], + *, + model: ModelName = None, + preserve_media_in_history: bool | None = None, + tools: list[LLMTool] | None = None, + tool_choice: str | dict[str, Any] | None = None, + **kwargs: Any, + ) -> LLMResponse: + """ + 进行一次聊天对话,支持工具调用。 + 此方法会自动使用和更新会话内的历史记录。 + + 参数: + message: 用户输入的消息。 + model: 本次对话要使用的模型。 + preserve_media_in_history: 是否在历史记录中保留原始多模态信息。 + - True: 保留,用于深度多轮媒体分析。 + - False: 不保留,替换为占位符,提高效率。 + - None (默认): 使用AI实例配置的默认值。 + tools: 本次对话可用的工具列表。 + tool_choice: 强制模型使用的工具。 + **kwargs: 
传递给模型的其他生成参数。 + + 返回: + LLMResponse: 模型的完整响应,可能包含文本或工具调用请求。 + """ + current_message: LLMMessage + if isinstance(message, str): + current_message = LLMMessage.user(message) + elif isinstance(message, list) and all( + isinstance(part, LLMContentPart) for part in message + ): + current_message = LLMMessage.user(message) + elif isinstance(message, LLMMessage): + current_message = message + else: + raise LLMException( + f"AI.chat 不支持的消息类型: {type(message)}. " + "请使用 str, LLMMessage, 或 list[LLMContentPart]. " + "对于更复杂的多模态输入或文件路径,请使用 AI.analyze().", + code=LLMErrorCode.API_REQUEST_FAILED, + ) + + final_messages = [*self.history, current_message] + + response = await self._execute_generation( + messages=final_messages, + model_name=model, + error_message="聊天失败", + config_overrides=kwargs, + llm_tools=tools, + tool_choice=tool_choice, + ) + + should_preserve = ( + preserve_media_in_history + if preserve_media_in_history is not None + else self.config.default_preserve_media_in_history + ) + + if should_preserve: + logger.debug("深度分析模式:在历史记录中保留原始多模态消息。") + self.history.append(current_message) + else: + logger.debug("高效模式:净化历史记录中的多模态消息。") + sanitized_user_message = self._sanitize_message_for_history(current_message) + self.history.append(sanitized_user_message) + + self.history.append( + LLMMessage( + role="assistant", content=response.text, tool_calls=response.tool_calls + ) + ) + + return response + + async def code( + self, + prompt: str, + *, + model: ModelName = None, + timeout: int | None = None, + **kwargs: Any, + ) -> dict[str, Any]: + """ + 代码执行 + + 参数: + prompt: 代码执行的提示词。 + model: 要使用的模型名称。 + timeout: 代码执行超时时间(秒)。 + **kwargs: 传递给模型的其他参数。 + + 返回: + dict[str, Any]: 包含执行结果的字典,包含text、code_executions和success字段。 + """ + resolved_model = model or self.config.model or "Gemini/gemini-2.0-flash" + + config = CommonOverrides.gemini_code_execution() + if timeout: + config.custom_params = config.custom_params or {} + config.custom_params["code_execution_timeout"] = timeout + + 
messages = [LLMMessage.user(prompt)] + + response = await self._execute_generation( + messages=messages, + model_name=resolved_model, + error_message="代码执行失败", + config_overrides=kwargs, + base_config=config, + ) + + return { + "text": response.text, + "code_executions": response.code_executions or [], + "success": True, + } + + async def search( + self, + query: str | UniMessage, + *, + model: ModelName = None, + instruction: str = "", + **kwargs: Any, + ) -> dict[str, Any]: + """ + 信息搜索 - 支持多模态输入 + + 参数: + query: 搜索查询内容,支持文本或多模态消息。 + model: 要使用的模型名称。 + instruction: 搜索指令。 + **kwargs: 传递给模型的其他参数。 + + 返回: + dict[str, Any]: 包含搜索结果的字典,包含text、sources、queries和success字段 + """ + from nonebot_plugin_alconna.uniseg import UniMessage + + resolved_model = model or self.config.model or "Gemini/gemini-2.0-flash" + config = CommonOverrides.gemini_grounding() + + if isinstance(query, str): + messages = [LLMMessage.user(query)] + elif isinstance(query, UniMessage): + content_parts = await unimsg_to_llm_parts(query) + + final_messages: list[LLMMessage] = [] + if instruction: + final_messages.append(LLMMessage.system(instruction)) + + if not content_parts: + if instruction: + final_messages.append(LLMMessage.user(instruction)) + else: + raise LLMException( + "搜索内容为空或无法处理。", code=LLMErrorCode.API_REQUEST_FAILED + ) + else: + final_messages.append(LLMMessage.user(content_parts)) + + messages = final_messages + else: + raise LLMException( + f"不支持的搜索输入类型: {type(query)}. 
请使用 str 或 UniMessage.", + code=LLMErrorCode.API_REQUEST_FAILED, + ) + + response = await self._execute_generation( + messages=messages, + model_name=resolved_model, + error_message="信息搜索失败", + config_overrides=kwargs, + base_config=config, + ) + + result = { + "text": response.text, + "sources": [], + "queries": [], + "success": True, + } + + if response.grounding_metadata: + result["sources"] = response.grounding_metadata.grounding_attributions or [] + result["queries"] = response.grounding_metadata.web_search_queries or [] + + return result + + async def analyze( + self, + message: UniMessage | None, + *, + instruction: str = "", + model: ModelName = None, + use_tools: list[str] | None = None, + tool_config: dict[str, Any] | None = None, + activated_tools: list[LLMTool] | None = None, + history: list[LLMMessage] | None = None, + **kwargs: Any, + ) -> LLMResponse: + """ + 内容分析 - 接收 UniMessage 物件进行多模态分析和工具呼叫。 + + 参数: + message: 要分析的消息内容(支持多模态)。 + instruction: 分析指令。 + model: 要使用的模型名称。 + use_tools: 要使用的工具名称列表。 + tool_config: 工具配置。 + activated_tools: 已激活的工具列表。 + history: 对话历史记录。 + **kwargs: 传递给模型的其他参数。 + + 返回: + LLMResponse: 模型的完整响应结果。 + """ + from nonebot_plugin_alconna.uniseg import UniMessage + + content_parts = await unimsg_to_llm_parts(message or UniMessage()) + + final_messages: list[LLMMessage] = [] + if history: + final_messages.extend(history) + + if instruction: + if not any(msg.role == "system" for msg in final_messages): + final_messages.insert(0, LLMMessage.system(instruction)) + + if not content_parts: + if instruction and not history: + final_messages.append(LLMMessage.user(instruction)) + elif not history: + raise LLMException( + "分析内容为空或无法处理。", code=LLMErrorCode.API_REQUEST_FAILED + ) + else: + final_messages.append(LLMMessage.user(content_parts)) + + llm_tools: list[LLMTool] | None = activated_tools + if not llm_tools and use_tools: + try: + llm_tools = tool_registry.get_tools(use_tools) + logger.debug(f"已从注册表加载工具定义: {use_tools}") + except ValueError 
as e: + raise LLMException( + f"加载工具定义失败: {e}", + code=LLMErrorCode.CONFIGURATION_ERROR, + cause=e, + ) + + tool_choice = None + if tool_config: + mode = tool_config.get("mode", "auto") + if mode in ["auto", "any", "none"]: + tool_choice = mode + + response = await self._execute_generation( + messages=final_messages, + model_name=model, + error_message="内容分析失败", + config_overrides=kwargs, + llm_tools=llm_tools, + tool_choice=tool_choice, + ) + + return response + + async def _execute_generation( + self, + messages: list[LLMMessage], + model_name: ModelName, + error_message: str, + config_overrides: dict[str, Any], + llm_tools: list[LLMTool] | None = None, + tool_choice: str | dict[str, Any] | None = None, + base_config: LLMGenerationConfig | None = None, + ) -> LLMResponse: + """通用的生成执行方法,封装模型获取和单次API调用""" + try: + resolved_model_name = self._resolve_model_name( + model_name or self.config.model + ) + final_config_dict = self._merge_config( + config_overrides, base_config=base_config + ) + + async with await get_model_instance( + resolved_model_name, override_config=final_config_dict + ) as model_instance: + return await model_instance.generate_response( + messages, + tools=llm_tools, + tool_choice=tool_choice, + ) + except LLMException: + raise + except Exception as e: + logger.error(f"{error_message}: {e}", e=e) + raise LLMException(f"{error_message}: {e}", cause=e) + + def _resolve_model_name(self, model_name: ModelName) -> str: + """解析模型名称""" + if model_name: + return model_name + + default_model = get_global_default_model_name() + if default_model: + return default_model + + raise LLMException( + "未指定模型名称且未设置全局默认模型", + code=LLMErrorCode.MODEL_NOT_FOUND, + ) + + def _merge_config( + self, + user_config: dict[str, Any], + base_config: LLMGenerationConfig | None = None, + ) -> dict[str, Any]: + """合并配置""" + final_config = {} + if base_config: + final_config.update(base_config.to_dict()) + + if self.config.temperature is not None: + final_config["temperature"] = 
self.config.temperature + if self.config.max_tokens is not None: + final_config["max_tokens"] = self.config.max_tokens + + if self.config.enable_cache: + final_config["enable_caching"] = True + if self.config.enable_code: + final_config["enable_code_execution"] = True + if self.config.enable_search: + final_config["enable_grounding"] = True + + if self.config.enable_gemini_json_mode: + final_config["response_mime_type"] = "application/json" + if self.config.enable_gemini_thinking: + final_config["thinking_budget"] = 0.8 + if self.config.enable_gemini_safe_mode: + final_config["safety_settings"] = ( + CommonOverrides.gemini_safe().safety_settings + ) + if self.config.enable_gemini_multimodal: + final_config.update(CommonOverrides.gemini_multimodal().to_dict()) + if self.config.enable_gemini_grounding: + final_config["enable_grounding"] = True + + final_config.update(user_config) + + return final_config + + async def embed( + self, + texts: list[str] | str, + *, + model: ModelName = None, + task_type: EmbeddingTaskType | str = EmbeddingTaskType.RETRIEVAL_DOCUMENT, + **kwargs: Any, + ) -> list[list[float]]: + """ + 生成文本嵌入向量 + + 参数: + texts: 要生成嵌入向量的文本或文本列表。 + model: 要使用的嵌入模型名称。 + task_type: 嵌入任务类型。 + **kwargs: 传递给模型的其他参数。 + + 返回: + list[list[float]]: 文本的嵌入向量列表。 + """ + if isinstance(texts, str): + texts = [texts] + if not texts: + return [] + + try: + resolved_model_str = ( + model or self.config.default_embedding_model or self.config.model + ) + if not resolved_model_str: + raise LLMException( + "使用 embed 功能时必须指定嵌入模型名称," + "或在 AIConfig 中配置 default_embedding_model。", + code=LLMErrorCode.MODEL_NOT_FOUND, + ) + resolved_model_str = self._resolve_model_name(resolved_model_str) + + async with await get_model_instance( + resolved_model_str, + override_config=None, + ) as embedding_model_instance: + return await embedding_model_instance.generate_embeddings( + texts, task_type=task_type, **kwargs + ) + except LLMException: + raise + except Exception as e: + 
logger.error(f"文本嵌入失败: {e}", e=e) + raise LLMException( + f"文本嵌入失败: {e}", code=LLMErrorCode.EMBEDDING_FAILED, cause=e + ) diff --git a/zhenxun/services/llm/types/__init__.py b/zhenxun/services/llm/types/__init__.py index f01bc291..72920d06 100644 --- a/zhenxun/services/llm/types/__init__.py +++ b/zhenxun/services/llm/types/__init__.py @@ -10,7 +10,13 @@ from .content import ( LLMMessage, LLMResponse, ) -from .enums import EmbeddingTaskType, ModelProvider, ResponseFormat, ToolCategory +from .enums import ( + EmbeddingTaskType, + ModelProvider, + ResponseFormat, + TaskType, + ToolCategory, +) from .exceptions import LLMErrorCode, LLMException, get_user_friendly_error_message from .models import ( LLMCacheInfo, @@ -52,6 +58,7 @@ __all__ = [ "ModelProvider", "ProviderConfig", "ResponseFormat", + "TaskType", "ToolCategory", "ToolMetadata", "UsageInfo", diff --git a/zhenxun/services/llm/types/enums.py b/zhenxun/services/llm/types/enums.py index 718a52ef..82cb49b0 100644 --- a/zhenxun/services/llm/types/enums.py +++ b/zhenxun/services/llm/types/enums.py @@ -45,6 +45,17 @@ class ToolCategory(Enum): CUSTOM = auto() +class TaskType(Enum): + """任务类型枚举""" + + CHAT = "chat" + CODE = "code" + SEARCH = "search" + ANALYSIS = "analysis" + GENERATION = "generation" + MULTIMODAL = "multimodal" + + class LLMErrorCode(Enum): """LLM 服务相关的错误代码枚举""" From 582ad8c996759a65392d6bb43a5d6d3ed7cd1d8b Mon Sep 17 00:00:00 2001 From: HibiKier <45528451+HibiKier@users.noreply.github.com> Date: Mon, 14 Jul 2025 22:59:56 +0800 Subject: [PATCH 3/4] =?UTF-8?q?:bug:=20=E4=BF=AE=E5=A4=8Dsqlite=E8=BF=9E?= =?UTF-8?q?=E6=8E=A5=E9=97=AE=E9=A2=98=20(#1979)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :ambulance: 修复sqlite连接问题 * 🔧 移除db_url参数以简化数据库配置获取逻辑 --- zhenxun/services/db_context.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/zhenxun/services/db_context.py b/zhenxun/services/db_context.py index ffe98415..e6c42472 100644 --- 
a/zhenxun/services/db_context.py +++ b/zhenxun/services/db_context.py @@ -357,7 +357,7 @@ SQLITE_CONFIG = { } -def get_config(db_url: str) -> dict: +def get_config() -> dict: """获取数据库配置""" parsed = urlparse(BotConfig.db_url) @@ -404,7 +404,7 @@ def get_config(db_url: str) -> dict: config["connections"]["default"] = { "engine": "tortoise.backends.sqlite", "credentials": { - "file_path": parsed.path[1:] or ":memory:", + "file_path": parsed.path or ":memory:", }, **SQLITE_CONFIG, } @@ -426,7 +426,7 @@ async def init(): raise DbUrlIsNode("\n" + error.strip()) try: await Tortoise.init( - config=get_config(BotConfig.db_url), + config=get_config(), ) if SCRIPT_METHOD: db = Tortoise.get_connection("default") From faa91b8bd4042845d3597d83ada709f8830aae60 Mon Sep 17 00:00:00 2001 From: molanp <104612722+molanp@users.noreply.github.com> Date: Mon, 14 Jul 2025 23:20:13 +0800 Subject: [PATCH 4/4] =?UTF-8?q?=F0=9F=9A=91=20=E4=BF=AE=E5=A4=8D=E6=95=B0?= =?UTF-8?q?=E6=8D=AE=E8=BF=81=E7=A7=BBSQL=20(#1969)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * perf(zhenxun): 优化签到和道具 SQL 查询语句 - 改为通用SQL * style(zhenxun): 优化签到 SQL 查询格式 - 调整 SQL 查询的缩进和格式,提高可读性 - 没有修改实际的查询逻辑,仅优化代码结构 --------- Co-authored-by: HibiKier <45528451+HibiKier@users.noreply.github.com> --- zhenxun/builtin_plugins/__init__.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/zhenxun/builtin_plugins/__init__.py b/zhenxun/builtin_plugins/__init__.py index f2688905..4003e506 100644 --- a/zhenxun/builtin_plugins/__init__.py +++ b/zhenxun/builtin_plugins/__init__.py @@ -50,22 +50,31 @@ async def _(bot: Bot): SIGN_SQL = """ -select distinct on("user_id") t1.user_id, t1.checkin_count, t1.add_probability, -t1.specify_probability, t1.impression -from public.sign_group_users t1 - join ( - select user_id, max(t2.impression) as max_impression - from public.sign_group_users t2 - group by user_id - ) t on t.user_id = t1.user_id and 
t.max_impression = t1.impression +SELECT user_id, checkin_count, add_probability, specify_probability, impression +FROM ( + SELECT + t1.user_id, + t1.checkin_count, + t1.add_probability, + t1.specify_probability, + t1.impression, + ROW_NUMBER() OVER(PARTITION BY t1.user_id ORDER BY t1.impression DESC) AS rn + FROM sign_group_users t1 + INNER JOIN ( + SELECT user_id, MAX(impression) AS max_impression + FROM sign_group_users + GROUP BY user_id + ) t2 ON t2.user_id = t1.user_id AND t2.max_impression = t1.impression +) t +WHERE rn = 1 """ BAG_SQL = """ select t1.user_id, t1.gold, t1.property -from public.bag_users t1 +from bag_users t1 join ( select user_id, max(t2.gold) as max_gold - from public.bag_users t2 + from bag_users t2 group by user_id ) t on t.user_id = t1.user_id and t.max_gold = t1.gold """