mirror of
https://github.com/zhenxun-org/zhenxun_bot.git
synced 2025-12-14 21:52:56 +08:00
⚡ 添加github镜像
This commit is contained in:
parent
a61934650e
commit
8615eb20d4
@ -103,7 +103,7 @@ async def test_plugin_store_fail(
|
||||
|
||||
init_mocked_api(mocked_api=mocked_api)
|
||||
mocked_api.get(
|
||||
"https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins/plugins.json",
|
||||
"https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins/main/plugins.json",
|
||||
name="basic_plugins",
|
||||
).respond(404)
|
||||
|
||||
|
||||
@ -25,14 +25,26 @@ def init_mocked_api(mocked_api: MockRouter) -> None:
|
||||
"https://data.jsdelivr.com/v1/packages/gh/zhenxun-org/zhenxun_bot_plugins@main",
|
||||
name="zhenxun_bot_plugins_metadata",
|
||||
).respond(json=get_response_json("zhenxun_bot_plugins_metadata.json"))
|
||||
mocked_api.head(
|
||||
"https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins/main/plugins.json",
|
||||
name="head_basic_plugins",
|
||||
).respond(200, text="")
|
||||
mocked_api.get(
|
||||
"https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins/plugins.json",
|
||||
"https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins/main/plugins.json",
|
||||
name="basic_plugins",
|
||||
).respond(json=get_response_json("basic_plugins.json"))
|
||||
mocked_api.get(
|
||||
"https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins@main/plugins.json",
|
||||
name="basic_plugins_jsdelivr",
|
||||
).respond(200, json=get_response_json("basic_plugins.json"))
|
||||
mocked_api.get(
|
||||
"https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins_index/index/plugins.json",
|
||||
name="extra_plugins",
|
||||
).respond(200, json=get_response_json("extra_plugins.json"))
|
||||
mocked_api.get(
|
||||
"https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins_index@index/plugins.json",
|
||||
name="extra_plugins_jsdelivr",
|
||||
).respond(200, json=get_response_json("extra_plugins.json"))
|
||||
mocked_api.get(
|
||||
"https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins/main/plugins/search_image/__init__.py",
|
||||
name="search_image_plugin_file_init",
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import os
|
||||
import json
|
||||
from pathlib import Path
|
||||
from collections.abc import Callable
|
||||
@ -77,8 +78,11 @@ async def app(app: App, tmp_path: Path, mocker: MockerFixture):
|
||||
|
||||
await init()
|
||||
# await driver._lifespan.startup()
|
||||
os.environ["AIOCACHE_DISABLE"] = "1"
|
||||
|
||||
yield app
|
||||
|
||||
del os.environ["AIOCACHE_DISABLE"]
|
||||
# await driver._lifespan.shutdown()
|
||||
await disconnect()
|
||||
|
||||
|
||||
@ -5,17 +5,11 @@ BASE_PATH = Path() / "zhenxun"
|
||||
BASE_PATH.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
CONFIG_URL = "https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins/plugins.json"
|
||||
"""插件信息文件"""
|
||||
|
||||
CONFIG_INDEX_URL = "https://raw.githubusercontent.com/zhenxun-org/zhenxun_bot_plugins_index/index/plugins.json"
|
||||
"""插件索引库信息文件"""
|
||||
|
||||
CONFIG_INDEX_CDN_URL = "https://cdn.jsdelivr.net/gh/zhenxun-org/zhenxun_bot_plugins_index@index/plugins.json"
|
||||
"""插件索引库信息文件cdn"""
|
||||
|
||||
DEFAULT_GITHUB_URL = "https://github.com/zhenxun-org/zhenxun_bot_plugins/tree/main"
|
||||
"""默认github仓库地址"""
|
||||
"""伴生插件github仓库地址"""
|
||||
|
||||
EXTRA_GITHUB_URL = "https://github.com/zhenxun-org/zhenxun_bot_plugins_index/tree/index"
|
||||
"""插件库索引github仓库地址"""
|
||||
|
||||
GITHUB_REPO_URL_PATTERN = re.compile(
|
||||
r"^https://github.com/(?P<owner>[^/]+)/(?P<repo>[^/]+)(/tree/(?P<branch>[^/]+))?$"
|
||||
|
||||
@ -3,6 +3,7 @@ import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
import ujson as json
|
||||
from aiocache import cached
|
||||
|
||||
from zhenxun.services.log import logger
|
||||
from zhenxun.utils.http_utils import AsyncHttpx
|
||||
@ -19,12 +20,9 @@ from zhenxun.builtin_plugins.plugin_store.models import (
|
||||
|
||||
from .config import (
|
||||
BASE_PATH,
|
||||
CONFIG_URL,
|
||||
CONFIG_INDEX_URL,
|
||||
EXTRA_GITHUB_URL,
|
||||
DEFAULT_GITHUB_URL,
|
||||
CONFIG_INDEX_CDN_URL,
|
||||
JSD_PACKAGE_API_FORMAT,
|
||||
GITHUB_REPO_URL_PATTERN,
|
||||
)
|
||||
|
||||
|
||||
@ -140,6 +138,7 @@ def install_requirement(plugin_path: Path):
|
||||
|
||||
class ShopManage:
|
||||
@classmethod
|
||||
@cached(60)
|
||||
async def __get_data(cls) -> dict[str, StorePluginInfo]:
|
||||
"""获取插件信息数据
|
||||
|
||||
@ -149,12 +148,14 @@ class ShopManage:
|
||||
返回:
|
||||
dict: 插件信息数据
|
||||
"""
|
||||
res = await AsyncHttpx.get(CONFIG_URL)
|
||||
res2 = await AsyncHttpx.get(CONFIG_INDEX_URL)
|
||||
|
||||
if res2.status_code != 200:
|
||||
logger.info("访问第三方插件信息文件失败,改为进行cdn访问")
|
||||
res2 = await AsyncHttpx.get(CONFIG_INDEX_CDN_URL)
|
||||
default_github_url = await RepoInfo.parse_github_url(
|
||||
DEFAULT_GITHUB_URL
|
||||
).get_download_url_with_path("plugins.json")
|
||||
extra_github_url = await RepoInfo.parse_github_url(
|
||||
EXTRA_GITHUB_URL
|
||||
).get_download_url_with_path("plugins.json")
|
||||
res = await AsyncHttpx.get(default_github_url)
|
||||
res2 = await AsyncHttpx.get(extra_github_url)
|
||||
|
||||
# 检查请求结果
|
||||
if res.status_code != 200 or res2.status_code != 200:
|
||||
@ -274,7 +275,7 @@ class ShopManage:
|
||||
return f"插件 {plugin_key} 安装成功! 重启后生效"
|
||||
|
||||
@classmethod
|
||||
async def get_repo_package_info(cls, repo_info: RepoInfo) -> JsdPackageInfo:
|
||||
async def get_repo_package_info_of_jsd(cls, repo_info: RepoInfo) -> JsdPackageInfo:
|
||||
"""获取插件包信息
|
||||
|
||||
参数:
|
||||
@ -291,19 +292,13 @@ class ShopManage:
|
||||
raise ValueError(f"下载错误, code: {res.status_code}")
|
||||
return JsdPackageInfo(**res.json())
|
||||
|
||||
@classmethod
def expand_github_url(cls, github_url: str) -> RepoInfo:
    """Parse a GitHub repository URL into a :class:`RepoInfo`.

    Raises:
        ValueError: the URL is not a recognised GitHub repo address.
    """
    matched = GITHUB_REPO_URL_PATTERN.match(github_url)
    if matched is None:
        raise ValueError("github地址格式错误")
    return RepoInfo(**matched.groupdict())  # type: ignore
|
||||
|
||||
@classmethod
|
||||
async def install_plugin_with_repo(
|
||||
cls, github_url: str, module_path: str, is_dir: bool, is_external: bool = False
|
||||
):
|
||||
repo_info = cls.expand_github_url(github_url)
|
||||
repo_info = RepoInfo.parse_github_url(github_url)
|
||||
logger.debug(f"成功获取仓库信息: {repo_info}", "插件管理")
|
||||
jsd_package_info: JsdPackageInfo = await cls.get_repo_package_info(
|
||||
jsd_package_info: JsdPackageInfo = await cls.get_repo_package_info_of_jsd(
|
||||
repo_info=repo_info
|
||||
)
|
||||
files = full_files_path(jsd_package_info, module_path, is_dir)
|
||||
@ -313,7 +308,9 @@ class ShopManage:
|
||||
is_dir,
|
||||
)
|
||||
logger.debug(f"获取插件文件列表: {files}", "插件管理")
|
||||
download_urls = [repo_info.get_download_url_with_path(file) for file in files]
|
||||
download_urls = [
|
||||
await repo_info.get_download_url_with_path(file) for file in files
|
||||
]
|
||||
base_path = BASE_PATH / "plugins" if is_external else BASE_PATH
|
||||
download_paths: list[Path | str] = [base_path / file for file in files]
|
||||
logger.debug(f"插件下载路径: {download_paths}", "插件管理")
|
||||
@ -332,7 +329,7 @@ class ShopManage:
|
||||
)
|
||||
logger.debug(f"获取插件依赖文件列表: {req_files}", "插件管理")
|
||||
req_download_urls = [
|
||||
repo_info.get_download_url_with_path(file) for file in req_files
|
||||
await repo_info.get_download_url_with_path(file) for file in req_files
|
||||
]
|
||||
req_paths: list[Path | str] = [plugin_path / file for file in req_files]
|
||||
logger.debug(f"插件依赖文件下载路径: {req_paths}", "插件管理")
|
||||
|
||||
@ -1,7 +1,11 @@
|
||||
from aiocache import cached
|
||||
from strenum import StrEnum
|
||||
from pydantic import BaseModel, validator
|
||||
|
||||
from zhenxun.utils.enum import PluginType
|
||||
from zhenxun.utils.http_utils import AsyncHttpx
|
||||
|
||||
from .config import GITHUB_REPO_URL_PATTERN
|
||||
|
||||
type2name: dict[str, str] = {
|
||||
"NORMAL": "普通插件",
|
||||
@ -39,11 +43,39 @@ class RepoInfo(BaseModel):
|
||||
branch: str | None
|
||||
|
||||
@validator("branch", pre=True, always=True)
def _set_default_branch(cls, v):
    """Fall back to the "main" branch when none was parsed from the URL."""
    if v is None:
        return "main"
    return v
|
||||
|
||||
async def get_download_url_with_path(self, path: str):
    """Return the full download URL for ``path`` in this repository.

    The URL template is chosen by probing the configured mirrors and
    picking the fastest one (see ``get_fastest_format``); the repo's
    owner/repo/branch fields are substituted into that template.
    """
    template = await self.get_fastest_format()
    fields = self.dict()
    return template.format(**fields, path=path)
|
||||
|
||||
@classmethod
def parse_github_url(cls, github_url: str) -> "RepoInfo":
    """Parse a GitHub repository URL into a :class:`RepoInfo`.

    Raises:
        ValueError: the URL does not match the expected GitHub repo pattern.
    """
    matched = GITHUB_REPO_URL_PATTERN.match(github_url)
    if matched is None:
        raise ValueError("github地址格式错误")
    return RepoInfo(**matched.groupdict())
|
||||
|
||||
@classmethod
@cached()
async def get_fastest_format(cls) -> str:
    """Probe the known mirrors and return the fastest download URL template.

    The result is cached (aiocache), so the network probe only runs once
    per cache lifetime.

    Raises:
        Exception: none of the probe URLs was reachable.
    """
    raw_format = "https://raw.githubusercontent.com/{owner}/{repo}/{branch}/{path}"
    # Each probe URL maps to the URL template that mirror serves.
    raw_probe = (
        "https://raw.githubusercontent.com"
        "/zhenxun-org/zhenxun_bot_plugins/main"
        "/plugins.json"
    )
    patterns: dict[str, str] = {raw_probe: raw_format}
    # The plain reverse proxies simply prefix the raw URL.
    for proxy_base in (
        "https://ghproxy.cc/",
        "https://mirror.ghproxy.com/",
        "https://gh-proxy.com/",
    ):
        patterns[proxy_base] = f"{proxy_base}{raw_format}"
    # jsDelivr uses its own gh/<owner>/<repo>@<branch> path scheme.
    patterns["https://cdn.jsdelivr.net/"] = (
        "https://cdn.jsdelivr.net/gh/{owner}/{repo}@{branch}/{path}"
    )
    sorted_urls = await AsyncHttpx.get_fastest_mirror(list(patterns.keys()))
    if not sorted_urls:
        raise Exception("无法获取任意GitHub资源加速地址,请检查网络")
    return patterns[sorted_urls[0]]
|
||||
|
||||
|
||||
class FileType(StrEnum):
|
||||
|
||||
@ -1,20 +1,22 @@
|
||||
import time
|
||||
import asyncio
|
||||
from asyncio.exceptions import TimeoutError
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
from typing import Any, AsyncGenerator, Dict, Literal
|
||||
from typing import Any, Literal, ClassVar
|
||||
from collections.abc import AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
from asyncio.exceptions import TimeoutError
|
||||
|
||||
import aiofiles
|
||||
import httpx
|
||||
import rich
|
||||
from httpx import ConnectTimeout, Response
|
||||
import httpx
|
||||
import aiofiles
|
||||
from retrying import retry
|
||||
from playwright.async_api import Page
|
||||
from httpx import Response, ConnectTimeout
|
||||
from nonebot_plugin_alconna import UniMessage
|
||||
from nonebot_plugin_htmlrender import get_browser
|
||||
from playwright.async_api import Page
|
||||
from retrying import retry
|
||||
|
||||
from zhenxun.configs.config import BotConfig
|
||||
from zhenxun.services.log import logger
|
||||
from zhenxun.configs.config import BotConfig
|
||||
from zhenxun.utils.message import MessageUtils
|
||||
from zhenxun.utils.user_agent import get_user_agent
|
||||
|
||||
@ -22,8 +24,10 @@ from zhenxun.utils.user_agent import get_user_agent
|
||||
|
||||
|
||||
class AsyncHttpx:
|
||||
|
||||
proxy = {"http://": BotConfig.system_proxy, "https://": BotConfig.system_proxy}
|
||||
proxy: ClassVar[dict[str, str | None]] = {
|
||||
"http://": BotConfig.system_proxy,
|
||||
"https://": BotConfig.system_proxy,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@retry(stop_max_attempt_number=3)
|
||||
@ -31,12 +35,12 @@ class AsyncHttpx:
|
||||
cls,
|
||||
url: str,
|
||||
*,
|
||||
params: Dict[str, Any] | None = None,
|
||||
headers: Dict[str, str] | None = None,
|
||||
cookies: Dict[str, str] | None = None,
|
||||
params: dict[str, Any] | None = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
cookies: dict[str, str] | None = None,
|
||||
verify: bool = True,
|
||||
use_proxy: bool = True,
|
||||
proxy: Dict[str, str] | None = None,
|
||||
proxy: dict[str, str] | None = None,
|
||||
timeout: int = 30,
|
||||
**kwargs,
|
||||
) -> Response:
|
||||
@ -65,21 +69,60 @@ class AsyncHttpx:
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
@classmethod
async def head(
    cls,
    url: str,
    *,
    params: dict[str, Any] | None = None,
    headers: dict[str, str] | None = None,
    cookies: dict[str, str] | None = None,
    verify: bool = True,
    use_proxy: bool = True,
    proxy: dict[str, str] | None = None,
    timeout: int = 30,
    **kwargs,
) -> Response:
    """发起 HEAD 请求 (send an HTTP HEAD request).

    参数:
        url: url
        params: params
        headers: 请求头 (a default User-Agent header is supplied when empty)
        cookies: cookies
        verify: verify TLS certificates
        use_proxy: 使用默认代理
        proxy: 指定代理
        timeout: 超时时间
    """
    if not headers:
        headers = get_user_agent()
    # Explicit proxy wins; otherwise the class-level proxy (if enabled).
    if proxy:
        _proxy = proxy
    elif use_proxy:
        _proxy = cls.proxy
    else:
        _proxy = None
    async with httpx.AsyncClient(proxies=_proxy, verify=verify) as client:  # type: ignore
        response = await client.head(
            url,
            params=params,
            headers=headers,
            cookies=cookies,
            timeout=timeout,
            **kwargs,
        )
        return response
|
||||
|
||||
@classmethod
|
||||
async def post(
|
||||
cls,
|
||||
url: str,
|
||||
*,
|
||||
data: Dict[str, Any] | None = None,
|
||||
data: dict[str, Any] | None = None,
|
||||
content: Any = None,
|
||||
files: Any = None,
|
||||
verify: bool = True,
|
||||
use_proxy: bool = True,
|
||||
proxy: Dict[str, str] | None = None,
|
||||
json: Dict[str, Any] | None = None,
|
||||
params: Dict[str, str] | None = None,
|
||||
headers: Dict[str, str] | None = None,
|
||||
cookies: Dict[str, str] | None = None,
|
||||
proxy: dict[str, str] | None = None,
|
||||
json: dict[str, Any] | None = None,
|
||||
params: dict[str, str] | None = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
cookies: dict[str, str] | None = None,
|
||||
timeout: int = 30,
|
||||
**kwargs,
|
||||
) -> Response:
|
||||
@ -122,12 +165,12 @@ class AsyncHttpx:
|
||||
url: str,
|
||||
path: str | Path,
|
||||
*,
|
||||
params: Dict[str, str] | None = None,
|
||||
params: dict[str, str] | None = None,
|
||||
verify: bool = True,
|
||||
use_proxy: bool = True,
|
||||
proxy: Dict[str, str] | None = None,
|
||||
headers: Dict[str, str] | None = None,
|
||||
cookies: Dict[str, str] | None = None,
|
||||
proxy: dict[str, str] | None = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
cookies: dict[str, str] | None = None,
|
||||
timeout: int = 30,
|
||||
stream: bool = False,
|
||||
**kwargs,
|
||||
@ -177,7 +220,8 @@ class AsyncHttpx:
|
||||
_proxy = proxy if proxy else cls.proxy if use_proxy else None
|
||||
try:
|
||||
async with httpx.AsyncClient(
|
||||
proxies=_proxy, verify=verify # type: ignore
|
||||
proxies=_proxy, # type: ignore
|
||||
verify=verify,
|
||||
) as client:
|
||||
async with client.stream(
|
||||
"GET",
|
||||
@ -229,11 +273,11 @@ class AsyncHttpx:
|
||||
path_list: list[str | Path],
|
||||
*,
|
||||
limit_async_number: int | None = None,
|
||||
params: Dict[str, str] | None = None,
|
||||
params: dict[str, str] | None = None,
|
||||
use_proxy: bool = True,
|
||||
proxy: Dict[str, str] | None = None,
|
||||
headers: Dict[str, str] | None = None,
|
||||
cookies: Dict[str, str] | None = None,
|
||||
proxy: dict[str, str] | None = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
cookies: dict[str, str] | None = None,
|
||||
timeout: int = 30,
|
||||
**kwargs,
|
||||
) -> list[bool]:
|
||||
@ -295,6 +339,40 @@ class AsyncHttpx:
|
||||
tasks.clear()
|
||||
return result_
|
||||
|
||||
@classmethod
async def get_fastest_mirror(cls, url_list: list[str]) -> list[str]:
    """Probe every URL with a HEAD request and rank them by latency.

    参数:
        url_list: candidate mirror URLs; must be non-empty

    返回:
        list[str]: reachable URLs, fastest first. Mirrors whose HEAD
        request fails, returns a non-2xx status, or omits the
        content-length header are dropped, so the result may be empty.

    异常:
        ValueError: ``url_list`` is empty.
    """
    # `assert` would be stripped under `python -O`; validate explicitly.
    if not url_list:
        raise ValueError("url_list must not be empty")

    async def head_mirror(client: type[AsyncHttpx], url: str) -> dict[str, Any]:
        # One timed HEAD probe; any exception marks the mirror unusable.
        begin_time = time.time()

        response = await client.head(url=url, timeout=6)
        response.raise_for_status()

        elapsed_time = (time.time() - begin_time) * 1000
        # NOTE(review): raises KeyError when the mirror omits
        # content-length; gather(return_exceptions=True) below then
        # drops that mirror — presumably intentional, confirm.
        content_length = int(response.headers["content-length"])

        return {
            "url": url,
            "elapsed_time": elapsed_time,
            "content_length": content_length,
        }

    logger.debug(f"开始获取最快镜像,可能需要一段时间... | URL列表:{url_list}")
    results = await asyncio.gather(
        *(head_mirror(cls, url) for url in url_list),
        return_exceptions=True,
    )
    _results: list[dict[str, Any]] = []
    for result in results:
        if isinstance(result, BaseException):
            logger.warning(f"获取镜像失败,错误:{result}")
        else:
            logger.debug(f"获取镜像成功,结果:{result}")
            _results.append(result)
    # Fastest first; the redundant iter() wrapper from before is gone.
    _results.sort(key=lambda r: r["elapsed_time"])
    return [result["url"] for result in _results]
|
||||
|
||||
|
||||
class AsyncPlaywright:
|
||||
@classmethod
|
||||
@ -322,7 +400,7 @@ class AsyncPlaywright:
|
||||
element: str | list[str],
|
||||
*,
|
||||
wait_time: int | None = None,
|
||||
viewport_size: Dict[str, int] | None = None,
|
||||
viewport_size: dict[str, int] | None = None,
|
||||
wait_until: (
|
||||
Literal["domcontentloaded", "load", "networkidle"] | None
|
||||
) = "networkidle",
|
||||
@ -344,7 +422,7 @@ class AsyncPlaywright:
|
||||
type_: 保存类型
|
||||
"""
|
||||
if viewport_size is None:
|
||||
viewport_size = dict(width=2560, height=1080)
|
||||
viewport_size = {"width": 2560, "height": 1080}
|
||||
if isinstance(path, str):
|
||||
path = Path(path)
|
||||
wait_time = wait_time * 1000 if wait_time else None
|
||||
|
||||
Loading…
Reference in New Issue
Block a user