"""Search share.dmhy.org's RSS feed for anime torrents and return formatted magnet links."""
import time
from typing import List
from urllib import parse

import aiohttp
import feedparser
from lxml import etree

from services.log import logger
from utils.utils import get_local_proxy
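
# Note on the project-local helpers above (assumptions, not defined in this file):
# - get_local_proxy() is expected to return a proxy URL suitable for aiohttp's
#   `proxy=` argument, or None when no proxy is configured.
# - logger is the bot's shared logging facade.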


async def from_anime_get_info(key_word: str, max_: int) -> List[str]:
    """Search dmhy for key_word and return at most max_ formatted results."""
    s_time = time.time()
    repass = []  # must be a list: the .insert() below fails on the old "" default
    url = "https://share.dmhy.org/topics/rss/rss.xml?keyword=" + parse.quote(key_word)
    try:
        repass = await get_repass(url, max_)
    except Exception as e:
        logger.error(f"Anime search request failed: {e}")
    # Header line: "results for <key_word> (took N seconds)".
    repass.insert(0, f"搜索 {key_word} 结果(耗时 {int(time.time() - s_time)} 秒):\n")
    return repass


async def get_repass(url: str, max_: int) -> List[str]:
    """Fetch the RSS feed, then scrape each entry page for category, title, size and magnet link."""
    put_line = []
    async with aiohttp.ClientSession() as session:
        async with session.get(url, proxy=get_local_proxy(), timeout=20) as response:
            d = feedparser.parse(await response.text())
        # Never request more pages than the feed actually returned.
        max_ = min(max_, len(d.entries))
        url_list = [e.link for e in d.entries][:max_]
        for u in url_list:
            try:
                async with session.get(
                    u, proxy=get_local_proxy(), timeout=20
                ) as res:
                    html = etree.HTML(await res.text())
                    magnet = html.xpath('.//a[@id="a_magnet"]/text()')[0]
                    title = html.xpath(".//h3/text()")[0]
                    item = html.xpath(
                        '//div[@class="info resource-info right"]/ul/li'
                    )
                    # Each <li> starts with a fixed-width label; drop the first
                    # five characters and strip the layout whitespace around the
                    # value. item[0] is the category, item[3] the file size.
                    class_a = (
                        item[0]
                        .xpath("string(.)")[5:]
                        .strip()
                        .replace("\xa0", "")
                        .replace("\t", "")
                    )
                    size = item[3].xpath("string(.)")[5:].strip()
                    put_line.append(
                        f"【{class_a}】| {title}\n【{size}】| {magnet}"
                    )
            except Exception as e:
                logger.warning(f"搜番超时 e:{e}")  # "anime search timed out"
    return put_line
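

# A minimal local-run sketch (an addition, not part of the original module).
# It assumes the surrounding bot project is importable so that services.log
# and utils.utils resolve; outside the bot, stub logger and get_local_proxy.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        # Fetch at most 3 results for a sample keyword and print them.
        for line in await from_anime_get_info("进击的巨人", 3):
            print(line)

    asyncio.run(_demo())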