mirror of https://github.com/zhenxun-org/zhenxun_bot.git
375 lines
13 KiB
Python
import asyncio
from asyncio.exceptions import TimeoutError
from contextlib import asynccontextmanager
from pathlib import Path
from typing import Any, AsyncGenerator, Dict, List, Literal, Optional, Union

import aiofiles
import httpx
import rich.progress  # import the submodule explicitly; "import rich" alone does not expose rich.progress
from httpx import ConnectTimeout, Response
from nonebot.adapters.onebot.v11 import MessageSegment
from playwright.async_api import BrowserContext, Page
from retrying import retry

from services.log import logger
from utils.user_agent import get_user_agent

from .browser import get_browser
from .message_builder import image
from .utils import get_local_proxy

class AsyncHttpx:

    proxy = {"http://": get_local_proxy(), "https://": get_local_proxy()}
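
    # Note added for clarity (not in the original source): httpx accepts a proxy
    # mapping keyed by URL scheme prefix, and get_local_proxy() is assumed to
    # return either a proxy URL string or None, so the mapping looks roughly like:
    #
    #     {"http://": "http://127.0.0.1:7890", "https://": "http://127.0.0.1:7890"}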

    @classmethod
    @retry(stop_max_attempt_number=3)
    async def get(
        cls,
        url: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None,
        cookies: Optional[Dict[str, str]] = None,
        verify: bool = True,
        use_proxy: bool = True,
        proxy: Optional[Dict[str, str]] = None,
        timeout: Optional[int] = 30,
        **kwargs,
    ) -> Response:
        """
        Description:
            Send a GET request.
        Args:
            :param url: target url
            :param params: query parameters
            :param headers: request headers
            :param cookies: cookies
            :param verify: whether to verify TLS certificates
            :param use_proxy: use the default proxy
            :param proxy: explicit proxy mapping
            :param timeout: timeout in seconds
        """
        if not headers:
            headers = get_user_agent()
        proxy = proxy if proxy else cls.proxy if use_proxy else None
        async with httpx.AsyncClient(proxies=proxy, verify=verify) as client:
            return await client.get(
                url,
                params=params,
                headers=headers,
                cookies=cookies,
                timeout=timeout,
                **kwargs,
            )
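
    # Illustrative usage (not part of the original module); a sketch assuming an
    # async caller and a reachable placeholder URL:
    #
    #     resp = await AsyncHttpx.get("https://example.com/api", use_proxy=False)
    #     data = resp.json()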

    @classmethod
    async def post(
        cls,
        url: str,
        *,
        data: Optional[Dict[str, str]] = None,
        content: Any = None,
        files: Any = None,
        verify: bool = True,
        use_proxy: bool = True,
        proxy: Optional[Dict[str, str]] = None,
        json: Optional[Dict[str, Any]] = None,
        params: Optional[Dict[str, str]] = None,
        headers: Optional[Dict[str, str]] = None,
        cookies: Optional[Dict[str, str]] = None,
        timeout: Optional[int] = 30,
        **kwargs,
    ) -> Response:
        """
        Description:
            Send a POST request.
        Args:
            :param url: target url
            :param data: form data
            :param content: raw request body
            :param files: files to upload
            :param verify: whether to verify TLS certificates
            :param use_proxy: use the default proxy
            :param proxy: explicit proxy mapping
            :param json: json body
            :param params: query parameters
            :param headers: request headers
            :param cookies: cookies
            :param timeout: timeout in seconds
        """
        if not headers:
            headers = get_user_agent()
        proxy = proxy if proxy else cls.proxy if use_proxy else None
        async with httpx.AsyncClient(proxies=proxy, verify=verify) as client:
            return await client.post(
                url,
                content=content,
                data=data,
                files=files,
                json=json,
                params=params,
                headers=headers,
                cookies=cookies,
                timeout=timeout,
                **kwargs,
            )
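
    # Illustrative usage (not part of the original module): posting a JSON body;
    # the URL and payload below are placeholders.
    #
    #     resp = await AsyncHttpx.post(
    #         "https://example.com/api", json={"key": "value"}, use_proxy=False
    #     )
    #     resp.raise_for_status()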

    @classmethod
    async def download_file(
        cls,
        url: str,
        path: Union[str, Path],
        *,
        params: Optional[Dict[str, str]] = None,
        verify: bool = True,
        use_proxy: bool = True,
        proxy: Optional[Dict[str, str]] = None,
        headers: Optional[Dict[str, str]] = None,
        cookies: Optional[Dict[str, str]] = None,
        timeout: Optional[int] = 30,
        stream: bool = False,
        **kwargs,
    ) -> bool:
        """
        Description:
            Download a file.
        Args:
            :param url: target url
            :param path: destination path
            :param params: query parameters
            :param verify: whether to verify TLS certificates
            :param use_proxy: use the default proxy
            :param proxy: explicit proxy mapping
            :param headers: request headers
            :param cookies: cookies
            :param timeout: timeout in seconds
            :param stream: use streaming download (chunked writes with a progress bar, suited to large files)
        """
        if isinstance(path, str):
            path = Path(path)
        path.parent.mkdir(parents=True, exist_ok=True)
        try:
            for _ in range(3):
                if not stream:
                    try:
                        content = (
                            await cls.get(
                                url,
                                params=params,
                                headers=headers,
                                cookies=cookies,
                                verify=verify,
                                use_proxy=use_proxy,
                                proxy=proxy,
                                timeout=timeout,
                                **kwargs,
                            )
                        ).content
                        async with aiofiles.open(path, "wb") as wf:
                            await wf.write(content)
                        logger.info(f"Downloaded {url} successfully.. Path: {path.absolute()}")
                        return True
                    except (TimeoutError, ConnectTimeout):
                        pass
                else:
                    if not headers:
                        headers = get_user_agent()
                    proxy = proxy if proxy else cls.proxy if use_proxy else None
                    try:
                        async with httpx.AsyncClient(
                            proxies=proxy, verify=verify
                        ) as client:
                            async with client.stream(
                                "GET",
                                url,
                                params=params,
                                headers=headers,
                                cookies=cookies,
                                timeout=timeout,
                                **kwargs,
                            ) as response:
                                logger.info(
                                    f"Start downloading {path.name}.. Path: {path.absolute()}"
                                )
                                async with aiofiles.open(path, "wb") as wf:
                                    total = int(response.headers["Content-Length"])
                                    with rich.progress.Progress(
                                        rich.progress.TextColumn(path.name),
                                        "[progress.percentage]{task.percentage:>3.0f}%",
                                        rich.progress.BarColumn(bar_width=None),
                                        rich.progress.DownloadColumn(),
                                        rich.progress.TransferSpeedColumn(),
                                    ) as progress:
                                        download_task = progress.add_task(
                                            "Download", total=total
                                        )
                                        async for chunk in response.aiter_bytes():
                                            await wf.write(chunk)
                                            await wf.flush()
                                            progress.update(
                                                download_task,
                                                completed=response.num_bytes_downloaded,
                                            )
                                logger.info(f"Downloaded {url} successfully.. Path: {path.absolute()}")
                                return True
                    except (TimeoutError, ConnectTimeout):
                        pass
            else:
                # for-else: reached only when all three attempts timed out
                logger.error(f"Download of {url} timed out.. Path: {path.absolute()}")
        except Exception as e:
            logger.error(
                f"Unknown error while downloading {url} {type(e)}: {e}.. Path: {path.absolute()}"
            )
        return False
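
    # Illustrative usage (not part of the original module): streaming a large
    # file to a local path; the URL and path below are placeholders.
    #
    #     ok = await AsyncHttpx.download_file(
    #         "https://example.com/big.zip", Path("data/big.zip"), stream=True
    #     )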

    @classmethod
    async def gather_download_file(
        cls,
        url_list: List[str],
        path_list: List[Union[str, Path]],
        *,
        limit_async_number: Optional[int] = None,
        params: Optional[Dict[str, str]] = None,
        use_proxy: bool = True,
        proxy: Optional[Dict[str, str]] = None,
        headers: Optional[Dict[str, str]] = None,
        cookies: Optional[Dict[str, str]] = None,
        timeout: Optional[int] = 30,
        **kwargs,
    ) -> List[bool]:
        """
        Description:
            Download multiple files concurrently, in batches.
        Args:
            :param url_list: list of urls
            :param path_list: list of destination paths
            :param limit_async_number: maximum number of concurrent requests
            :param params: query parameters
            :param use_proxy: use the default proxy
            :param proxy: explicit proxy mapping
            :param headers: request headers
            :param cookies: cookies
            :param timeout: timeout in seconds
        """
        if (n := len(url_list)) != len(path_list):
            raise UrlPathNumberNotEqual(
                f"The number of urls does not match the number of paths, "
                f"Url: {len(url_list)}, Path: {len(path_list)}"
            )
        if limit_async_number and n > limit_async_number:
            m = float(n) / limit_async_number
            x = 0
            j = limit_async_number
            _split_url_list = []
            _split_path_list = []
            for _ in range(int(m)):
                _split_url_list.append(url_list[x:j])
                _split_path_list.append(path_list[x:j])
                x += limit_async_number
                j += limit_async_number
            if int(m) < m:
                # append the remaining tail that did not fill a whole batch
                _split_url_list.append(url_list[x:])
                _split_path_list.append(path_list[x:])
        else:
            _split_url_list = [url_list]
            _split_path_list = [path_list]
        tasks = []
        result_ = []
        for x, y in zip(_split_url_list, _split_path_list):
            for url, path in zip(x, y):
                tasks.append(
                    asyncio.create_task(
                        cls.download_file(
                            url,
                            path,
                            params=params,
                            headers=headers,
                            cookies=cookies,
                            use_proxy=use_proxy,
                            timeout=timeout,
                            proxy=proxy,
                            **kwargs,
                        )
                    )
                )
            _x = await asyncio.gather(*tasks)
            result_ = result_ + list(_x)
            tasks.clear()
        return result_
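
    # Illustrative usage (not part of the original module): downloading several
    # files with at most 2 concurrent requests; urls and paths are placeholders.
    #
    #     results = await AsyncHttpx.gather_download_file(
    #         ["https://example.com/a.jpg", "https://example.com/b.jpg"],
    #         [Path("a.jpg"), Path("b.jpg")],
    #         limit_async_number=2,
    #     )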


class AsyncPlaywright:
    @classmethod
    @asynccontextmanager
    async def new_page(cls, **kwargs) -> AsyncGenerator[Page, None]:
        """
        Description:
            Open a new page in a fresh browser context.
        Args:
            :param kwargs: forwarded to browser.new_context(), e.g. user_agent
        """
        browser = get_browser()
        ctx = await browser.new_context(**kwargs)
        page = await ctx.new_page()
        try:
            yield page
        finally:
            await page.close()
            await ctx.close()
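
    # Illustrative usage (not part of the original module): kwargs are forwarded
    # to browser.new_context(), e.g. user_agent or viewport; the URL is a placeholder.
    #
    #     async with AsyncPlaywright.new_page(user_agent="Mozilla/5.0") as page:
    #         await page.goto("https://example.com")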

    @classmethod
    async def screenshot(
        cls,
        url: str,
        path: Union[Path, str],
        element: Union[str, List[str]],
        *,
        wait_time: Optional[int] = None,
        viewport_size: Optional[Dict[str, int]] = None,
        wait_until: Optional[
            Literal["domcontentloaded", "load", "networkidle"]
        ] = "networkidle",
        timeout: Optional[float] = None,
        type_: Optional[Literal["jpeg", "png"]] = None,
        **kwargs,
    ) -> Optional[MessageSegment]:
        """
        Description:
            Take a screenshot. This helper is only for simple, quick captures;
            for anything more involved, operate on the page directly.
        Args:
            :param url: target url
            :param path: destination path
            :param element: selector, or a chain of selectors resolved one inside the other
            :param wait_time: maximum time (in seconds) to wait for each selector
            :param viewport_size: viewport size
            :param wait_until: page load state to wait for
            :param timeout: navigation and screenshot timeout
            :param type_: image type to save
        """
        if viewport_size is None:
            viewport_size = dict(width=2560, height=1080)
        if isinstance(path, str):
            path = Path(path)
        # playwright expects milliseconds
        wait_time = wait_time * 1000 if wait_time else None
        if isinstance(element, str):
            element_list = [element]
        else:
            element_list = element
        async with cls.new_page(viewport=viewport_size) as page:
            await page.goto(url, timeout=timeout, wait_until=wait_until)
            card = page
            for e in element_list:
                if not card:
                    return None
                card = await card.wait_for_selector(e, timeout=wait_time)
            if card:
                await card.screenshot(path=path, timeout=timeout, type=type_)
                return image(path)
        return None
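
    # Illustrative usage (not part of the original module): capturing a single
    # element from a placeholder URL to a placeholder path; returns a nonebot
    # image MessageSegment on success, otherwise None.
    #
    #     seg = await AsyncPlaywright.screenshot(
    #         "https://example.com", Path("shot.png"), "#main", wait_time=10
    #     )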


class UrlPathNumberNotEqual(Exception):
    pass


class BrowserIsNone(Exception):
    pass