Update PY/html/LIVES.py, PY/html/LREEOK.py, PY/html/嗷呜动漫.py, PY/html/好帅短剧.py, PY/html/红果网页.py, PY/html/金牌.py, PY/html/剧粑粑.py, PY/html/绝对影视.py, PY/html/甜圈短剧.py, PY/html/偷乐短剧.py, PY/html/旺旺.py, PY/html/小红影视.py files
parent eb76381653
commit 692adf02b6

PY/html/LIVES.py (new file, 768 lines)
@@ -0,0 +1,768 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import json
import re
import sys
import time
from base64 import b64decode, b64encode
from urllib.parse import parse_qs
import requests
from pyquery import PyQuery as pq
sys.path.append('..')
from base.spider import Spider
from concurrent.futures import ThreadPoolExecutor


class Spider(Spider):

    def init(self, extend=""):
        tid = 'douyin'
        headers = self.gethr(0, tid)
        response = requests.head(self.hosts[tid], headers=headers)
        ttwid = response.cookies.get('ttwid')
        headers.update({
            'authority': self.hosts[tid].split('//')[-1],
            'cookie': f'ttwid={ttwid}' if ttwid else ''
        })
        self.dyheaders = headers
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    headers = [
        {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0"
        },
        {
            "User-Agent": "Dart/3.4 (dart:io)"
        }
    ]

    excepturl = 'https://www.baidu.com'

    hosts = {
        "huya": ["https://www.huya.com", "https://mp.huya.com"],
        "douyin": "https://live.douyin.com",
        "douyu": "https://www.douyu.com",
        "wangyi": "https://cc.163.com",
        "bili": ["https://api.live.bilibili.com", "https://api.bilibili.com"]
    }

    referers = {
        "huya": "https://live.cdn.huya.com",
        "douyin": "https://live.douyin.com",
        "douyu": "https://m.douyu.com",
        "bili": "https://live.bilibili.com"
    }

    playheaders = {
        "wangyi": {
            "User-Agent": "ExoPlayer",
            "Connection": "Keep-Alive",
            "Icy-MetaData": "1"
        },
        "bili": {
            'Accept': '*/*',
            'Icy-MetaData': '1',
            'referer': referers['bili'],
            'user-agent': headers[0]['User-Agent']
        },
        'douyin': {
            'User-Agent': 'libmpv',
            'Icy-MetaData': '1'
        },
        'huya': {
            'User-Agent': 'ExoPlayer',
            'Connection': 'Keep-Alive',
            'Icy-MetaData': '1'
        },
        'douyu': {
            'User-Agent': 'libmpv',
            'Icy-MetaData': '1'
        }
    }

    def process_bili(self):
        try:
            self.blfdata = self.fetch(
                f'{self.hosts["bili"][0]}/room/v1/Area/getList?need_entrance=1&parent_id=0',
                headers=self.gethr(0, 'bili')
            ).json()
            return ('bili', [{'key': 'cate', 'name': '分类',
                              'value': [{'n': i['name'], 'v': str(i['id'])}
                                        for i in self.blfdata['data']]}])
        except Exception as e:
            print(f"bili处理错误: {e}")
            return 'bili', None

    def process_douyin(self):
        try:
            data = self.getpq(self.hosts['douyin'], headers=self.dyheaders)('script')
            for i in data.items():
                if 'categoryData' in i.text():
                    content = i.text()
                    start = content.find('{')
                    end = content.rfind('}') + 1
                    if start != -1 and end != -1:
                        json_str = content[start:end]
                        json_str = json_str.replace('\\"', '"')
                        try:
                            self.dyifdata = json.loads(json_str)
                            return ('douyin', [{'key': 'cate', 'name': '分类',
                                                'value': [{'n': i['partition']['title'],
                                                           'v': f"{i['partition']['id_str']}@@{i['partition']['title']}"}
                                                          for i in self.dyifdata['categoryData']]}])
                        except json.JSONDecodeError as e:
                            print(f"douyin解析错误: {e}")
                            return 'douyin', None
        except Exception as e:
            print(f"douyin请求或处理错误: {e}")
            return 'douyin', None

    def process_douyu(self):
        try:
            self.dyufdata = self.fetch(
                f'{self.referers["douyu"]}/api/cate/list',
                headers=self.headers[1]
            ).json()
            return ('douyu', [{'key': 'cate', 'name': '分类',
                               'value': [{'n': i['cate1Name'], 'v': str(i['cate1Id'])}
                                         for i in self.dyufdata['data']['cate1Info']]}])
        except Exception as e:
            print(f"douyu错误: {e}")
            return 'douyu', None

    def homeContent(self, filter):
        result = {}
        cateManual = {
            "虎牙": "huya",
            "哔哩": "bili",
            "抖音": "douyin",
            "斗鱼": "douyu",
            "网易": "wangyi"
        }
        classes = []
        filters = {
            'huya': [{'key': 'cate', 'name': '分类',
                      'value': [{'n': '网游', 'v': '1'}, {'n': '单机', 'v': '2'},
                                {'n': '娱乐', 'v': '8'}, {'n': '手游', 'v': '3'}]}]
        }

        with ThreadPoolExecutor(max_workers=3) as executor:
            futures = {
                executor.submit(self.process_bili): 'bili',
                executor.submit(self.process_douyin): 'douyin',
                executor.submit(self.process_douyu): 'douyu'
            }

            for future in futures:
                platform, filter_data = future.result()
                if filter_data:
                    filters[platform] = filter_data

        for k in cateManual:
            classes.append({
                'type_name': k,
                'type_id': cateManual[k]
            })

        result['class'] = classes
        result['filters'] = filters
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        vdata = []
        result = {}
        pagecount = 9999
        result['page'] = pg
        result['limit'] = 90
        result['total'] = 999999
        if tid == 'wangyi':
            vdata, pagecount = self.wyccContent(tid, pg, filter, extend, vdata)
        elif 'bili' in tid:
            vdata, pagecount = self.biliContent(tid, pg, filter, extend, vdata)
        elif 'huya' in tid:
            vdata, pagecount = self.huyaContent(tid, pg, filter, extend, vdata)
        elif 'douyin' in tid:
            vdata, pagecount = self.douyinContent(tid, pg, filter, extend, vdata)
        elif 'douyu' in tid:
            vdata, pagecount = self.douyuContent(tid, pg, filter, extend, vdata)
        result['list'] = vdata
        result['pagecount'] = pagecount
        return result

    def wyccContent(self, tid, pg, filter, extend, vdata):
        params = {
            'format': 'json',
            'start': (int(pg) - 1) * 20,
            'size': '20',
        }
        response = self.fetch(f'{self.hosts[tid]}/api/category/live/', params=params, headers=self.headers[0]).json()
        for i in response['lives']:
            if i.get('cuteid'):
                bvdata = self.buildvod(
                    vod_id=f"{tid}@@{i['cuteid']}",
                    vod_name=i.get('title'),
                    vod_pic=i.get('cover'),
                    vod_remarks=i.get('nickname'),
                    style={"type": "rect", "ratio": 1.33}
                )
                vdata.append(bvdata)
        return vdata, 9999

    def biliContent(self, tid, pg, filter, extend, vdata):
        if extend.get('cate') and pg == '1' and 'click' not in tid:
            for i in self.blfdata['data']:
                if str(i['id']) == extend['cate']:
                    for j in i['list']:
                        v = self.buildvod(
                            vod_id=f"click_{tid}@@{i['id']}@@{j['id']}",
                            vod_name=j.get('name'),
                            vod_pic=j.get('pic'),
                            vod_tag=1,
                            style={"type": "oval", "ratio": 1}
                        )
                        vdata.append(v)
            return vdata, 1
        else:
            path = f'/xlive/web-interface/v1/second/getListByArea?platform=web&sort=online&page_size=30&page={pg}'
            if 'click' in tid:
                ids = tid.split('_')[1].split('@@')
                tid = ids[0]
                path = f'/xlive/web-interface/v1/second/getList?platform=web&parent_area_id={ids[1]}&area_id={ids[-1]}&sort_type=&page={pg}'
            data = self.fetch(f'{self.hosts[tid][0]}{path}', headers=self.gethr(0, tid)).json()
            for i in data['data']['list']:
                if i.get('roomid'):
                    data = self.buildvod(
                        f"{tid}@@{i['roomid']}",
                        i.get('title'),
                        i.get('cover'),
                        i.get('watched_show', {}).get('text_large'),
                        0,
                        i.get('uname'),
                        style={"type": "rect", "ratio": 1.33}
                    )
                    vdata.append(data)
            return vdata, 9999

    def huyaContent(self, tid, pg, filter, extend, vdata):
        if extend.get('cate') and pg == '1' and 'click' not in tid:
            id = extend.get('cate')
            data = self.fetch(f'{self.referers[tid]}/liveconfig/game/bussLive?bussType={id}',
                              headers=self.headers[1]).json()
            for i in data['data']:
                v = self.buildvod(
                    vod_id=f"click_{tid}@@{int(i['gid'])}",
                    vod_name=i.get('gameFullName'),
                    vod_pic=f'https://huyaimg.msstatic.com/cdnimage/game/{int(i["gid"])}-MS.jpg',
                    vod_tag=1,
                    style={"type": "oval", "ratio": 1}
                )
                vdata.append(v)
            return vdata, 1
        else:
            gid = ''
            if 'click' in tid:
                ids = tid.split('_')[1].split('@@')
                tid = ids[0]
                gid = f'&gameId={ids[1]}'
            data = self.fetch(f'{self.hosts[tid][0]}/cache.php?m=LiveList&do=getLiveListByPage&tagAll=0{gid}&page={pg}',
                              headers=self.headers[1]).json()
            for i in data['data']['datas']:
                if i.get('profileRoom'):
                    v = self.buildvod(
                        f"{tid}@@{i['profileRoom']}",
                        i.get('introduction'),
                        i.get('screenshot'),
                        str(int(i.get('totalCount', '1')) / 10000) + '万',
                        0,
                        i.get('nick'),
                        style={"type": "rect", "ratio": 1.33}
                    )
                    vdata.append(v)
            return vdata, 9999

    def douyinContent(self, tid, pg, filter, extend, vdata):
        if extend.get('cate') and pg == '1' and 'click' not in tid:
            ids = extend.get('cate').split('@@')
            for i in self.dyifdata['categoryData']:
                c = i['partition']
                if c['id_str'] == ids[0] and c['title'] == ids[1]:
                    vlist = i['sub_partition'].copy()
                    vlist.insert(0, {'partition': c})
                    for j in vlist:
                        j = j['partition']
                        v = self.buildvod(
                            vod_id=f"click_{tid}@@{j['id_str']}@@{j['type']}",
                            vod_name=j.get('title'),
                            vod_pic='https://p3-pc-weboff.byteimg.com/tos-cn-i-9r5gewecjs/pwa_v3/512x512-1.png',
                            vod_tag=1,
                            style={"type": "oval", "ratio": 1}
                        )
                        vdata.append(v)
            return vdata, 1
        else:
            path = f'/webcast/web/partition/detail/room/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&count=15&offset={(int(pg) - 1) * 15}&partition=720&partition_type=1'
            if 'click' in tid:
                ids = tid.split('_')[1].split('@@')
                tid = ids[0]
                path = f'/webcast/web/partition/detail/room/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&count=15&offset={(int(pg) - 1) * 15}&partition={ids[1]}&partition_type={ids[-1]}&req_from=2'
            data = self.fetch(f'{self.hosts[tid]}{path}', headers=self.dyheaders).json()
            for i in data['data']['data']:
                v = self.buildvod(
                    vod_id=f"{tid}@@{i['web_rid']}",
                    vod_name=i['room'].get('title'),
                    vod_pic=i['room']['cover'].get('url_list')[0],
                    vod_year=i.get('user_count_str'),
                    vod_remarks=i['room']['owner'].get('nickname'),
                    style={"type": "rect", "ratio": 1.33}
                )
                vdata.append(v)
            return vdata, 9999

    def douyuContent(self, tid, pg, filter, extend, vdata):
        if extend.get('cate') and pg == '1' and 'click' not in tid:
            for i in self.dyufdata['data']['cate2Info']:
                if str(i['cate1Id']) == extend['cate']:
                    v = self.buildvod(
                        vod_id=f"click_{tid}@@{i['cate2Id']}",
                        vod_name=i.get('cate2Name'),
                        vod_pic=i.get('icon'),
                        vod_remarks=i.get('count'),
                        vod_tag=1,
                        style={"type": "oval", "ratio": 1}
                    )
                    vdata.append(v)
            return vdata, 1
        else:
            path = f'/japi/weblist/apinc/allpage/6/{pg}'
            if 'click' in tid:
                ids = tid.split('_')[1].split('@@')
                tid = ids[0]
                path = f'/gapi/rkc/directory/mixList/2_{ids[1]}/{pg}'
            url = f'{self.hosts[tid]}{path}'
            data = self.fetch(url, headers=self.headers[1]).json()
            for i in data['data']['rl']:
                v = self.buildvod(
                    vod_id=f"{tid}@@{i['rid']}",
                    vod_name=i.get('rn'),
                    vod_pic=i.get('rs16'),
                    vod_year=str(int(i.get('ol', 1)) / 10000) + '万',
                    vod_remarks=i.get('nn'),
                    style={"type": "rect", "ratio": 1.33}
                )
                vdata.append(v)
            return vdata, 9999

    def detailContent(self, ids):
        ids = ids[0].split('@@')
        if ids[0] == 'wangyi':
            vod = self.wyccDetail(ids)
        elif ids[0] == 'bili':
            vod = self.biliDetail(ids)
        elif ids[0] == 'huya':
            vod = self.huyaDetail(ids)
        elif ids[0] == 'douyin':
            vod = self.douyinDetail(ids)
        elif ids[0] == 'douyu':
            vod = self.douyuDetail(ids)
        return {'list': [vod]}

    def wyccDetail(self, ids):
        try:
            vdata = self.getpq(f'{self.hosts[ids[0]]}/{ids[1]}', self.headers[0])('script').eq(-1).text()

            def get_quality_name(vbr):
                if vbr <= 600:
                    return "标清"
                elif vbr <= 1000:
                    return "高清"
                elif vbr <= 2000:
                    return "超清"
                else:
                    return "蓝光"

            data = json.loads(vdata)['props']['pageProps']['roomInfoInitData']
            name = data['live'].get('title', ids[0])
            vod = self.buildvod(vod_name=data.get('keywords_suffix'), vod_remarks=data['live'].get('title'),
                                vod_content=data.get('description_suffix'))
            resolution_data = data['live']['quickplay']['resolution']
            all_streams = {}
            sorted_qualities = sorted(resolution_data.items(),
                                      key=lambda x: x[1]['vbr'],
                                      reverse=True)
            for quality, data in sorted_qualities:
                vbr = data['vbr']
                quality_name = get_quality_name(vbr)
                for cdn_name, url in data['cdn'].items():
                    if cdn_name not in all_streams and type(url) == str and url.startswith('http'):
                        all_streams[cdn_name] = []
                    if isinstance(url, str) and url.startswith('http'):
                        all_streams[cdn_name].extend([quality_name, url])
            plists = []
            names = []
            for i, (cdn_name, stream_list) in enumerate(all_streams.items(), 1):
                names.append(f'线路{i}')
                pstr = f"{name}${ids[0]}@@{self.e64(json.dumps(stream_list))}"
                plists.append(pstr)
            vod['vod_play_from'] = "$$$".join(names)
            vod['vod_play_url'] = "$$$".join(plists)
            return vod
        except Exception as e:
            return self.handle_exception(e)

    def biliDetail(self, ids):
        try:
            vdata = self.fetch(
                f'{self.hosts[ids[0]][0]}/xlive/web-room/v1/index/getInfoByRoom?room_id={ids[1]}&wts={int(time.time())}',
                headers=self.gethr(0, ids[0])).json()
            v = vdata['data']['room_info']
            vod = self.buildvod(
                vod_name=v.get('title'),
                type_name=v.get('parent_area_name') + '/' + v.get('area_name'),
                vod_remarks=v.get('tags'),
                vod_play_from=v.get('title'),
            )
            data = self.fetch(
                f'{self.hosts[ids[0]][0]}/xlive/web-room/v2/index/getRoomPlayInfo?room_id={ids[1]}&protocol=0%2C1&format=0%2C1%2C2&codec=0%2C1&platform=web',
                headers=self.gethr(0, ids[0])).json()
            vdnams = data['data']['playurl_info']['playurl']['g_qn_desc']
            all_accept_qns = []
            streams = data['data']['playurl_info']['playurl']['stream']
            for stream in streams:
                for format_item in stream['format']:
                    for codec in format_item['codec']:
                        if 'accept_qn' in codec:
                            all_accept_qns.append(codec['accept_qn'])
            max_accept_qn = max(all_accept_qns, key=len) if all_accept_qns else []
            quality_map = {
                item['qn']: item['desc']
                for item in vdnams
            }
            quality_names = [f"{quality_map.get(qn)}${ids[0]}@@{ids[1]}@@{qn}" for qn in max_accept_qn]
            vod['vod_play_url'] = "#".join(quality_names)
            return vod
        except Exception as e:
            return self.handle_exception(e)

    def huyaDetail(self, ids):
        try:
            vdata = self.fetch(f'{self.hosts[ids[0]][1]}/cache.php?m=Live&do=profileRoom&roomid={ids[1]}',
                               headers=self.headers[0]).json()
            v = vdata['data']['liveData']
            vod = self.buildvod(
                vod_name=v.get('introduction'),
                type_name=v.get('gameFullName'),
                vod_director=v.get('nick'),
                vod_remarks=v.get('contentIntro'),
            )
            data = dict(reversed(list(vdata['data']['stream'].items())))
            names = []
            plist = []

            for stream_type, stream_data in data.items():
                if isinstance(stream_data, dict) and 'multiLine' in stream_data and 'rateArray' in stream_data:
                    names.append(f"线路{len(names) + 1}")
                    qualities = sorted(
                        stream_data['rateArray'],
                        key=lambda x: (x['iBitRate'], x['sDisplayName']),
                        reverse=True
                    )
                    cdn_urls = []
                    for cdn in stream_data['multiLine']:
                        quality_urls = []
                        for quality in qualities:
                            quality_name = quality['sDisplayName']
                            bit_rate = quality['iBitRate']
                            base_url = cdn['url']
                            if bit_rate > 0:
                                if '.m3u8' in base_url:
                                    new_url = base_url.replace(
                                        'ratio=2000',
                                        f'ratio={bit_rate}'
                                    )
                                else:
                                    new_url = base_url.replace(
                                        'imgplus.flv',
                                        f'imgplus_{bit_rate}.flv'
                                    )
                            else:
                                new_url = base_url
                            quality_urls.extend([quality_name, new_url])
                        encoded_urls = self.e64(json.dumps(quality_urls))
                        cdn_urls.append(f"{cdn['cdnType']}${ids[0]}@@{encoded_urls}")

                    if cdn_urls:
                        plist.append('#'.join(cdn_urls))
            vod['vod_play_from'] = "$$$".join(names)
            vod['vod_play_url'] = "$$$".join(plist)
            return vod
        except Exception as e:
            return self.handle_exception(e)

    def douyinDetail(self, ids):
        url = f'{self.hosts[ids[0]]}/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&enter_from=web_live&web_rid={ids[1]}&room_id_str=&enter_source=&Room-Enter-User-Login-Ab=0&is_need_double_stream=false&cookie_enabled=true&screen_width=1980&screen_height=1080&browser_language=zh-CN&browser_platform=Win32&browser_name=Edge&browser_version=125.0.0.0'
        data = self.fetch(url, headers=self.dyheaders).json()
        try:
            vdata = data['data']['data'][0]
            vod = self.buildvod(
                vod_name=vdata['title'],
                vod_remarks=vdata['user_count_str'],
            )
            resolution_data = vdata['stream_url']['live_core_sdk_data']['pull_data']['options']['qualities']
            stream_json = vdata['stream_url']['live_core_sdk_data']['pull_data']['stream_data']
            stream_json = json.loads(stream_json)
            available_types = []
            if any(sdk_key in stream_json['data'] and 'main' in stream_json['data'][sdk_key] for sdk_key in
                   stream_json['data']):
                available_types.append('main')
            if any(sdk_key in stream_json['data'] and 'backup' in stream_json['data'][sdk_key] for sdk_key in
                   stream_json['data']):
                available_types.append('backup')
            plist = []
            for line_type in available_types:
                format_arrays = {'flv': [], 'hls': [], 'lls': []}
                qualities = sorted(resolution_data, key=lambda x: x['level'], reverse=True)
                for quality in qualities:
                    sdk_key = quality['sdk_key']
                    if sdk_key in stream_json['data'] and line_type in stream_json['data'][sdk_key]:
                        stream_info = stream_json['data'][sdk_key][line_type]
                        if stream_info.get('flv'):
                            format_arrays['flv'].extend([quality['name'], stream_info['flv']])
                        if stream_info.get('hls'):
                            format_arrays['hls'].extend([quality['name'], stream_info['hls']])
                        if stream_info.get('lls'):
                            format_arrays['lls'].extend([quality['name'], stream_info['lls']])
                format_urls = []
                for format_name, url_array in format_arrays.items():
                    if url_array:
                        encoded_urls = self.e64(json.dumps(url_array))
                        format_urls.append(f"{format_name}${ids[0]}@@{encoded_urls}")

                if format_urls:
                    plist.append('#'.join(format_urls))

            names = ['线路1', '线路2'][:len(plist)]
            vod['vod_play_from'] = "$$$".join(names)
            vod['vod_play_url'] = "$$$".join(plist)
            return vod

        except Exception as e:
            return self.handle_exception(e)

    def douyuDetail(self, ids):
        headers = self.gethr(0, zr=f'{self.hosts[ids[0]]}/{ids[1]}')
        try:
            data = self.fetch(f'{self.hosts[ids[0]]}/betard/{ids[1]}', headers=headers).json()
            vname = data['room']['room_name']
            vod = self.buildvod(
                vod_name=vname,
                vod_remarks=data['room'].get('second_lvl_name'),
                vod_director=data['room'].get('nickname'),
            )
            vdata = self.fetch(f'{self.hosts[ids[0]]}/swf_api/homeH5Enc?rids={ids[1]}', headers=headers).json()
            json_body = vdata['data']
            json_body = {"html": self.douyu_text(json_body[f'room{ids[1]}']), "rid": ids[1]}
            sign = self.post('http://alive.nsapps.cn/api/AllLive/DouyuSign', json=json_body, headers=self.headers[1]).json()['data']
            body = f'{sign}&cdn=&rate=-1&ver=Douyu_223061205&iar=1&ive=1&hevc=0&fa=0'
            body = self.params_to_json(body)
            nubdata = self.post(f'{self.hosts[ids[0]]}/lapi/live/getH5Play/{ids[1]}', data=body, headers=headers).json()
            plist = []
            names = []
            for i, x in enumerate(nubdata['data']['cdnsWithName']):
                names.append(f'线路{i + 1}')
                d = {'sign': sign, 'cdn': x['cdn'], 'id': ids[1]}
                plist.append(
                    f'{vname}${ids[0]}@@{self.e64(json.dumps(d))}@@{self.e64(json.dumps(nubdata["data"]["multirates"]))}')
            vod['vod_play_from'] = "$$$".join(names)
            vod['vod_play_url'] = "$$$".join(plist)
            return vod
        except Exception as e:
            return self.handle_exception(e)

    def douyu_text(self, text):
        function_positions = [m.start() for m in re.finditer('function', text)]
        total_functions = len(function_positions)
        if total_functions % 2 == 0:
            target_index = total_functions // 2 + 1
        else:
            target_index = (total_functions - 1) // 2 + 1
        if total_functions >= target_index:
            cut_position = function_positions[target_index - 1]
            ctext = text[4:cut_position]
            return re.sub(r'eval\(strc\)\([\w\d,]+\)', 'strc', ctext)
        return text

    def searchContent(self, key, quick, pg="1"):
        pass

    def playerContent(self, flag, id, vipFlags):
        try:
            ids = id.split('@@')
            p = 1
            if ids[0] in ['wangyi', 'douyin', 'huya']:
                p, url = 0, json.loads(self.d64(ids[1]))
            elif ids[0] == 'bili':
                p, url = self.biliplay(ids)
            elif ids[0] == 'huya':
                p, url = 0, json.loads(self.d64(ids[1]))
            elif ids[0] == 'douyu':
                p, url = self.douyuplay(ids)
            return {'parse': p, 'url': url, 'header': self.playheaders[ids[0]]}
        except Exception as e:
            return {'parse': 1, 'url': self.excepturl, 'header': self.headers[0]}

    def biliplay(self, ids):
        try:
            data = self.fetch(
                f'{self.hosts[ids[0]][0]}/xlive/web-room/v2/index/getRoomPlayInfo?room_id={ids[1]}&protocol=0,1&format=0,2&codec=0&platform=web&qn={ids[2]}',
                headers=self.gethr(0, ids[0])).json()
            urls = []
            line_index = 1
            for stream in data['data']['playurl_info']['playurl']['stream']:
                for format_item in stream['format']:
                    for codec in format_item['codec']:
                        for url_info in codec['url_info']:
                            full_url = f"{url_info['host']}/{codec['base_url'].lstrip('/')}{url_info['extra']}"
                            urls.extend([f"线路{line_index}", full_url])
                            line_index += 1
            return 0, urls
        except Exception as e:
            return 1, self.excepturl

    def douyuplay(self, ids):
        try:
            sdata = json.loads(self.d64(ids[1]))
            headers = self.gethr(0, zr=f'{self.hosts[ids[0]]}/{sdata["id"]}')
            ldata = json.loads(self.d64(ids[2]))
            result_obj = {}
            with ThreadPoolExecutor(max_workers=len(ldata)) as executor:
                futures = [
                    executor.submit(
                        self.douyufp,
                        sdata,
                        quality,
                        headers,
                        self.hosts[ids[0]],
                        result_obj
                    ) for quality in ldata
                ]
                for future in futures:
                    future.result()

            result = []
            for bit in sorted(result_obj.keys(), reverse=True):
                result.extend(result_obj[bit])

            if result:
                return 0, result
            return 1, self.excepturl

        except Exception as e:
            return 1, self.excepturl

    def douyufp(self, sdata, quality, headers, host, result_obj):
        try:
            body = f'{sdata["sign"]}&cdn={sdata["cdn"]}&rate={quality["rate"]}'
            body = self.params_to_json(body)
            data = self.post(f'{host}/lapi/live/getH5Play/{sdata["id"]}',
                             data=body, headers=headers).json()
            if data.get('data'):
                play_url = data['data']['rtmp_url'] + '/' + data['data']['rtmp_live']
                bit = quality.get('bit', 0)
                if bit not in result_obj:
                    result_obj[bit] = []
                result_obj[bit].extend([quality['name'], play_url])
        except Exception as e:
            print(f"Error fetching {quality['name']}: {str(e)}")

    def localProxy(self, param):
        pass

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64编码错误: {str(e)}")
            return ""

    def d64(self, encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64解码错误: {str(e)}")
            return ""

    def josn_to_params(self, params, skip_empty=False):
        query = []
        for k, v in params.items():
            if skip_empty and not v:
                continue
            query.append(f"{k}={v}")
        return "&".join(query)

    def params_to_json(self, query_string):
        parsed_data = parse_qs(query_string)
        result = {key: value[0] for key, value in parsed_data.items()}
        return result

    def buildvod(self, vod_id='', vod_name='', vod_pic='', vod_year='', vod_tag='', vod_remarks='', style='',
                 type_name='', vod_area='', vod_actor='', vod_director='',
                 vod_content='', vod_play_from='', vod_play_url=''):
        vod = {
            'vod_id': vod_id,
            'vod_name': vod_name,
            'vod_pic': vod_pic,
            'vod_year': vod_year,
            'vod_tag': 'folder' if vod_tag else '',
            'vod_remarks': vod_remarks,
            'style': style,
            'type_name': type_name,
            'vod_area': vod_area,
            'vod_actor': vod_actor,
            'vod_director': vod_director,
            'vod_content': vod_content,
            'vod_play_from': vod_play_from,
            'vod_play_url': vod_play_url
        }
        vod = {key: value for key, value in vod.items() if value}
        return vod

    def getpq(self, url, headers=None, cookies=None):
        data = self.fetch(url, headers=headers, cookies=cookies).text
        try:
            return pq(data)
        except Exception as e:
            print(f"解析页面错误: {str(e)}")
            return pq(data.encode('utf-8'))

    def gethr(self, index, rf='', zr=''):
        headers = self.headers[index]
        if zr:
            headers['referer'] = zr
        else:
            headers['referer'] = f"{self.referers[rf]}/"
        return headers

    def handle_exception(self, e):
        print(f"报错: {str(e)}")
        return {'vod_play_from': '哎呀翻车啦', 'vod_play_url': f'翻车啦${self.excepturl}'}
PY/html/LREEOK.py (new file, 172 lines)
@@ -0,0 +1,172 @@
# -*- coding: utf-8 -*-
# by @嗷呜
# 温馨提示:官方APP数据是错误的,你们可以给官方反馈,然后就可以写APP
import re
import sys

from Crypto.Hash import MD5

sys.path.append("..")
import json
import time
from pyquery import PyQuery as pq
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    host = 'https://www.lreeok.vip'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'sec-ch-ua-platform': '"macOS"',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'Origin': host,
        'Referer': f"{host}/",
    }

    def homeContent(self, filter):
        data = self.getpq(self.fetch(self.host, headers=self.headers).text)
        result = {}
        classes = []
        for k in data('.head-more.box a').items():
            i = k.attr('href')
            if i and '/vod' in i:
                classes.append({
                    'type_name': k.text(),
                    'type_id': re.search(r'\d+', i).group(0)
                })
        result['class'] = classes
        result['list'] = self.getlist(data('.border-box.diy-center .public-list-div'))
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {'type': tid, 'class': '', 'area': '', 'lang': '', 'version': '', 'state': '', 'letter': '', 'page': pg}
        data = self.post(f"{self.host}/index.php/api/vod", headers=self.headers, data=self.getbody(body)).json()
        result = {}
        result['list'] = data['list']
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data = self.getpq(self.fetch(f"{self.host}/voddetail/{ids[0]}.html", headers=self.headers).text)
        v = data('.detail-info.lightSpeedIn .slide-info')
        vod = {
            'vod_year': v.eq(-1).text(),
            'vod_remarks': v.eq(0).text(),
            'vod_actor': v.eq(3).text(),
            'vod_director': v.eq(2).text(),
            'vod_content': data('.switch-box #height_limit').text()
        }
        np = data('.anthology.wow.fadeInUp')
        ndata = np('.anthology-tab .swiper-wrapper .swiper-slide')
        pdata = np('.anthology-list .anthology-list-box ul')
        play, names = [], []
        for i in range(len(ndata)):
            n = ndata.eq(i)('a')
            n('span').remove()
            names.append(n.text())
            vs = []
            for v in pdata.eq(i)('li').items():
                vs.append(f"{v.text()}${v('a').attr('href')}")
            play.append('#'.join(vs))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        # data = self.getpq(self.fetch(f"{self.host}/vodsearch/{key}----------{pg}---.html", headers=self.headers).text)
        # return {'list': self.getlist(data('.row-right .search-box .public-list-bj')), 'page': pg}
        data = self.fetch(
            f"{self.host}/index.php/ajax/suggest?mid={pg}&wd={key}&limit=999&timestamp={int(time.time() * 1000)}",
            headers=self.headers).json()
        videos = []
        for i in data['list']:
            videos.append({
                'vod_id': i['id'],
                'vod_name': i['name'],
                'vod_pic': i['pic']
            })
        return {'list': videos, 'page': pg}

    def playerContent(self, flag, id, vipFlags):
        h, p = {"User-Agent": "okhttp/3.14.9"}, 1
        url = f"{self.host}{id}"
        data = self.getpq(self.fetch(url, headers=self.headers).text)
        try:
            jstr = data('.player .player-left script').eq(0).text()
            jsdata = json.loads(jstr.split('aaa=')[-1])
            body = {'url': jsdata['url']}
            if not re.search(r'\.m3u8|\.mp4', body['url']):
                data = self.post(f"{self.host}/okplay/api_config.php", headers=self.headers,
                                 data=self.getbody(body)).json()
                url = data.get('url') or data.get('data', {}).get('url')
                p = 0
        except Exception as e:
            print('错误信息:', e)
            pass
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = h
        return result

    def localProxy(self, param):
        pass

    def getbody(self, params):
        t = int(time.time())
        h = MD5.new()
        h.update(f"DS{t}DCC147D11943AF75".encode('utf-8'))
        key = h.hexdigest()
        params.update({'time': t, 'key': key})
        return params

    def getlist(self, data):
        videos = []
        for i in data.items():
            id = i('a').attr('href')
            if id:
                id = re.search(r'\d+', id).group(0)
                img = i('img').attr('data-src')
                if img and 'url=' in img: img = f'{self.host}{img}'
                videos.append({
                    'vod_id': id,
                    'vod_name': i('img').attr('alt'),
                    'vod_pic': img,
                    'vod_remarks': i('.public-prt').text() or i('.public-list-prb').text()
                })
        return videos

    def getpq(self, data):
        try:
            return pq(data)
        except Exception as e:
            print(f"{str(e)}")
            return pq(data.encode('utf-8'))
PY/html/偷乐短剧.py (new file, 109 lines)
@@ -0,0 +1,109 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import json
import sys
from pyquery import PyQuery as pq
sys.path.append('..')
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    host = 'http://www.toule.top'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
        'Referer': f'{host}/',
        'Origin': host
    }

    def homeContent(self, filter):
        data = self.getpq()
        result = {}
        classes = []
        for k in data('.swiper-wrapper .swiper-slide').items():
            classes.append({
                'type_name': k.text(),
                'type_id': k.text()
            })
        result['class'] = classes
        result['list'] = self.getlist(data('.container.items ul li'))
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        data = self.getpq(f"/index.php/vod/show/class/{tid}/id/1/page/{pg}.html")
        result = {}
        result['list'] = self.getlist(data('.container.items ul li'))
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data = self.getpq(ids[0])
        v = data('.container.detail-content')
        vod = {
            'vod_remarks': v('.items-tags a').text(),
            'vod_content': v('.text-content .detail').text(),
            'vod_play_from': '嗷呜爱看短剧',
            'vod_play_url': '#'.join([f"{i.text()}${i('a').attr('href')}" for i in data('.swiper-wrapper .swiper-slide').items()])
        }
        return {'list': [vod]}

    def searchContent(self, key, quick, pg="1"):
        data = self.getpq(f"/index.php/vod/search/page/{pg}/wd/{key}.html")
        return {'list': self.getlist(data('.container.items ul li')), 'page': pg}

    def playerContent(self, flag, id, vipFlags):
        data = self.getpq(id)
        try:
            jstr = data('.player-content script').eq(0).text()
            jt = json.loads(jstr.split('=', 1)[-1])
            p, url = 0, jt['url']
        except Exception as e:
            print(f"获取播放地址失败: {e}")
            p, url = 1, f'{self.host}{id}'
        return {'parse': p, 'url': url, 'header': self.headers}

    def localProxy(self, param):
        pass

    def liveContent(self, url):
        pass

    def getpq(self, path=''):
        data = self.fetch(f"{self.host}{path}", headers=self.headers).text
        try:
            return pq(data)
        except Exception as e:
            print(f"{str(e)}")
            return pq(data.encode('utf-8'))

    def getlist(self, data):
        videos = []
        for i in data.items():
            videos.append({
                'vod_id': i('.image-line').attr('href'),
                'vod_name': i('img').attr('alt'),
                'vod_pic': i('img').attr('src'),
                'vod_remarks': i('.remarks.light').text()
            })
        return videos
PY/html/剧粑粑.py (new file, 315 lines)
@@ -0,0 +1,315 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import json
import random
import re
import sys
import time
from base64 import b64decode, b64encode
import concurrent.futures
import requests
from Crypto.Hash import MD5
from pyquery import PyQuery as pq
sys.path.append('..')
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        self.host = self.gethost()
        self.headers.update({
            'referer': f'{self.host}/',
            'origin': self.host,
        })
        self.session = requests.Session()
        self.session.headers.update(self.headers)
        self.session.get(self.host)
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-user': '?1',
        'sec-fetch-dest': 'document',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
    }

    config = {
"1":[{"key":"class","name":"剧情","value":[{"n":"全部","v":""},{"n":"喜剧","v":"喜剧"},{"n":"爱情","v":"爱情"},{"n":"恐怖","v":"恐怖"},{"n":"动作","v":"动作"},{"n":"科幻","v":"科幻"},{"n":"剧情","v":"剧情"},{"n":"战争","v":"战争"},{"n":"警匪","v":"警匪"},{"n":"犯罪","v":"犯罪"},{"n":"动画","v":"动画"},{"n":"奇幻","v":"奇幻"},{"n":"武侠","v":"武侠"},{"n":"冒险","v":"冒险"},{"n":"枪战","v":"枪战"},{"n":"悬疑","v":"悬疑"},{"n":"惊悚","v":"惊悚"},{"n":"经典","v":"经典"},{"n":"青春","v":"青春"},{"n":"伦理","v":"伦理"},{"n":"文艺","v":"文艺"},{"n":"微电影","v":"微电影"},{"n":"古装","v":"古装"},{"n":"历史","v":"历史"},{"n":"运动","v":"运动"},{"n":"农村","v":"农村"},{"n":"儿童","v":"儿童"},{"n":"网络电影","v":"网络电影"}]},{"key":"area","name":"地区","value":[{"n":"全部","v":""},{"n":"大陆","v":"大陆"},{"n":"香港","v":"香港"},{"n":"台湾","v":"台湾"},{"n":"美国","v":"美国"},{"n":"法国","v":"法国"},{"n":"英国","v":"英国"},{"n":"日本","v":"日本"},{"n":"韩国","v":"韩国"},{"n":"德国","v":"德国"},{"n":"泰国","v":"泰国"},{"n":"印度","v":"印度"},{"n":"意大利","v":"意大利"},{"n":"西班牙","v":"西班牙"},{"n":"加拿大","v":"加拿大"},{"n":"其他","v":"其他"}]},{"key":"year","name":"年份","value":[{"n":"全部","v":""},{"n":"2025","v":"2025"},{"n":"2024","v":"2024"},{"n":"2023","v":"2023"},{"n":"2022","v":"2022"},{"n":"2021","v":"2021"},{"n":"2020","v":"2020"},{"n":"2019","v":"2019"},{"n":"2018","v":"2018"},{"n":"2017","v":"2017"},{"n":"2016","v":"2016"},{"n":"2015","v":"2015"},{"n":"2014","v":"2014"},{"n":"2013","v":"2013"},{"n":"2012","v":"2012"},{"n":"2011","v":"2011"},{"n":"2010","v":"2010"},{"n":"2009","v":"2009"},{"n":"2008","v":"2008"},{"n":"2007","v":"2007"},{"n":"2006","v":"2006"},{"n":"2005","v":"2005"},{"n":"2004","v":"2004"},{"n":"2003","v":"2003"},{"n":"2002","v":"2002"},{"n":"2001","v":"2001"},{"n":"2000","v":"2000"}]},{"key":"by","name":"排序","value":[{"n":"时间","v":"time"},{"n":"人气","v":"hits"},{"n":"评分","v":"score"}]}],
"2":[{"key":"class","name":"剧情","value":[{"n":"全部","v":""},{"n":"古装","v":"古装"},{"n":"战争","v":"战争"},{"n":"青春偶像","v":"青春偶像"},{"n":"喜剧","v":"喜剧"},{"n":"家庭","v":"家庭"},{"n":"犯罪","v":"犯罪"},{"n":"动作","v":"动作"},{"n":"奇幻","v":"奇幻"},{"n":"剧情","v":"剧情"},{"n":"历史","v":"历史"},{"n":"经典","v":"经典"},{"n":"乡村","v":"乡村"},{"n":"情景","v":"情景"},{"n":"商战","v":"商战"},{"n":"网剧","v":"网剧"},{"n":"其他","v":"其他"}]},{"key":"area","name":"地区","value":[{"n":"全部","v":""},{"n":"内地","v":"内地"},{"n":"香港","v":"香港"},{"n":"台湾","v":"台湾"},{"n":"美国","v":"美国"},{"n":"法国","v":"法国"},{"n":"英国","v":"英国"},{"n":"日本","v":"日本"},{"n":"韩国","v":"韩国"},{"n":"德国","v":"德国"},{"n":"泰国","v":"泰国"},{"n":"印度","v":"印度"},{"n":"意大利","v":"意大利"},{"n":"西班牙","v":"西班牙"},{"n":"加拿大","v":"加拿大"},{"n":"其他","v":"其他"}]},{"key":"year","name":"年份","value":[{"n":"全部","v":""},{"n":"2025","v":"2025"},{"n":"2024","v":"2024"},{"n":"2023","v":"2023"},{"n":"2022","v":"2022"},{"n":"2021","v":"2021"},{"n":"2020","v":"2020"},{"n":"2019","v":"2019"},{"n":"2018","v":"2018"},{"n":"2017","v":"2017"},{"n":"2016","v":"2016"},{"n":"2015","v":"2015"},{"n":"2014","v":"2014"},{"n":"2013","v":"2013"},{"n":"2012","v":"2012"},{"n":"2011","v":"2011"},{"n":"2010","v":"2010"},{"n":"2009","v":"2009"},{"n":"2008","v":"2008"},{"n":"2007","v":"2007"},{"n":"2006","v":"2006"},{"n":"2005","v":"2005"},{"n":"2004","v":"2004"},{"n":"2003","v":"2003"},{"n":"2002","v":"2002"},{"n":"2001","v":"2001"},{"n":"2000","v":"2000"}]},{"key":"by","name":"排序","value":[{"n":"时间","v":"time"},{"n":"人气","v":"hits"},{"n":"评分","v":"score"}]}],
"3":[{"key":"class","name":"剧情","value":[{"n":"全部","v":""},{"n":"选秀","v":"选秀"},{"n":"情感","v":"情感"},{"n":"访谈","v":"访谈"},{"n":"播报","v":"播报"},{"n":"旅游","v":"旅游"},{"n":"音乐","v":"音乐"},{"n":"美食","v":"美食"},{"n":"纪实","v":"纪实"},{"n":"曲艺","v":"曲艺"},{"n":"生活","v":"生活"},{"n":"游戏互动","v":"游戏互动"},{"n":"财经","v":"财经"},{"n":"求职","v":"求职"}]},{"key":"area","name":"地区","value":[{"n":"全部","v":""},{"n":"内地","v":"内地"},{"n":"港台","v":"港台"},{"n":"欧美","v":"欧美"},{"n":"日韩","v":"日韩"},{"n":"其他","v":"其他"}]},{"key":"year","name":"年份","value":[{"n":"全部","v":""},{"n":"2025","v":"2025"},{"n":"2024","v":"2024"},{"n":"2023","v":"2023"},{"n":"2022","v":"2022"},{"n":"2021","v":"2021"},{"n":"2020","v":"2020"},{"n":"2019","v":"2019"},{"n":"2018","v":"2018"},{"n":"2017","v":"2017"},{"n":"2016","v":"2016"},{"n":"2015","v":"2015"},{"n":"2014","v":"2014"},{"n":"2013","v":"2013"},{"n":"2012","v":"2012"},{"n":"2011","v":"2011"},{"n":"2010","v":"2010"},{"n":"2009","v":"2009"},{"n":"2008","v":"2008"},{"n":"2007","v":"2007"},{"n":"2006","v":"2006"},{"n":"2005","v":"2005"},{"n":"2004","v":"2004"},{"n":"2003","v":"2003"},{"n":"2002","v":"2002"},{"n":"2001","v":"2001"},{"n":"2000","v":"2000"}]},{"key":"by","name":"排序","value":[{"n":"时间","v":"time"},{"n":"人气","v":"hits"},{"n":"评分","v":"score"}]}],
"4":[{"key":"class","name":"剧情","value":[{"n":"全部","v":""},{"n":"情感","v":"情感"},{"n":"科幻","v":"科幻"},{"n":"热血","v":"热血"},{"n":"推理","v":"推理"},{"n":"搞笑","v":"搞笑"},{"n":"冒险","v":"冒险"},{"n":"萝莉","v":"萝莉"},{"n":"校园","v":"校园"},{"n":"动作","v":"动作"},{"n":"机战","v":"机战"},{"n":"运动","v":"运动"},{"n":"战争","v":"战争"},{"n":"少年","v":"少年"},{"n":"少女","v":"少女"},{"n":"社会","v":"社会"},{"n":"原创","v":"原创"},{"n":"亲子","v":"亲子"},{"n":"益智","v":"益智"},{"n":"励志","v":"励志"},{"n":"其他","v":"其他"}]},{"key":"area","name":"地区","value":[{"n":"全部","v":""},{"n":"国产","v":"国产"},{"n":"欧美","v":"欧美"},{"n":"日本","v":"日本"},{"n":"其他","v":"其他"}]},{"key":"year","name":"年份","value":[{"n":"全部","v":""},{"n":"2025","v":"2025"},{"n":"2024","v":"2024"},{"n":"2023","v":"2023"},{"n":"2022","v":"2022"},{"n":"2021","v":"2021"},{"n":"2020","v":"2020"},{"n":"2019","v":"2019"},{"n":"2018","v":"2018"},{"n":"2017","v":"2017"},{"n":"2016","v":"2016"},{"n":"2015","v":"2015"},{"n":"2014","v":"2014"},{"n":"2013","v":"2013"},{"n":"2012","v":"2012"},{"n":"2011","v":"2011"},{"n":"2010","v":"2010"},{"n":"2009","v":"2009"},{"n":"2008","v":"2008"},{"n":"2007","v":"2007"},{"n":"2006","v":"2006"},{"n":"2005","v":"2005"},{"n":"2004","v":"2004"},{"n":"2003","v":"2003"},{"n":"2002","v":"2002"},{"n":"2001","v":"2001"},{"n":"2000","v":"2000"}]},{"key":"by","name":"排序","value":[{"n":"时间","v":"time"},{"n":"人气","v":"hits"},{"n":"评分","v":"score"}]}],
|
||||||
|
}
|
||||||
|
|
||||||
|
def homeContent(self, filter):
|
||||||
|
data=self.getpq()
|
||||||
|
result = {}
|
||||||
|
classes = []
|
||||||
|
for k in data('ul.swiper-wrapper').eq(0)('li').items():
|
||||||
|
i=k('a').attr('href')
|
||||||
|
if i and 'type' in i:
|
||||||
|
classes.append({
|
||||||
|
'type_name': k.text(),
|
||||||
|
'type_id': re.findall(r'\d+', i)[0],
|
||||||
|
})
|
||||||
|
result['class'] = classes
|
||||||
|
result['list'] = self.getlist(data('.tab-content.ewave-pannel_bd li'))
|
||||||
|
result['filters'] = self.config
|
||||||
|
return result
|
||||||
|
|
||||||
|
def homeVideoContent(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def categoryContent(self, tid, pg, filter, extend):
|
||||||
|
path=f"/vodshow/{tid}-{extend.get('area','')}-{extend.get('by','')}-{extend.get('class','')}-----{pg}---{extend.get('year','')}.html"
|
||||||
|
data=self.getpq(path)
|
||||||
|
result = {}
|
||||||
|
result['list'] = self.getlist(data('ul.ewave-vodlist.clearfix li'))
|
||||||
|
result['page'] = pg
|
||||||
|
result['pagecount'] = 9999
|
||||||
|
result['limit'] = 90
|
||||||
|
result['total'] = 999999
|
||||||
|
return result
|
||||||
|
|
||||||
|
def detailContent(self, ids):
|
||||||
|
data=self.getpq(f"/voddetail/{ids[0]}.html")
|
||||||
|
v=data('.ewave-content__detail')
|
||||||
|
c=data('p')
|
||||||
|
vod = {
|
||||||
|
'type_name':c.eq(0)('a').text(),
|
||||||
|
'vod_year': v('.data.hidden-sm').text(),
|
||||||
|
'vod_remarks': v('h1').text(),
|
||||||
|
'vod_actor': c.eq(1)('a').text(),
|
||||||
|
'vod_director': c.eq(2)('a').text(),
|
||||||
|
'vod_content': c.eq(-1).text(),
|
||||||
|
'vod_play_from': '',
|
||||||
|
'vod_play_url': ''
|
||||||
|
}
|
||||||
|
nd=list(data('ul.nav-tabs.swiper-wrapper li').items())
|
||||||
|
pd=list(data('ul.ewave-content__playlist').items())
|
||||||
|
n,p=[],[]
|
||||||
|
for i,x in enumerate(nd):
|
||||||
|
n.append(x.text())
|
||||||
|
p.append('#'.join([f"{j.text()}${j('a').attr('href')}" for j in pd[i]('li').items()]))
|
||||||
|
vod['vod_play_url']='$$$'.join(p)
|
||||||
|
vod['vod_play_from']='$$$'.join(n)
|
||||||
|
return {'list':[vod]}
|
||||||
|
|
||||||
|
def searchContent(self, key, quick, pg="1"):
|
||||||
|
if pg=="1":
|
||||||
|
p=f"-------------.html?wd={key}"
|
||||||
|
else:
|
||||||
|
p=f"{key}----------{pg}---.html"
|
||||||
|
data=self.getpq(f"/vodsearch/{p}")
|
||||||
|
return {'list':self.getlist(data('ul.ewave-vodlist__media.clearfix li')),'page':pg}
|
||||||
|
|
||||||
|
def playerContent(self, flag, id, vipFlags):
|
||||||
|
try:
|
||||||
|
data=self.getpq(id)
|
||||||
|
jstr = json.loads(data('.ewave-player__video script').eq(0).text().split('=', 1)[-1])
|
||||||
|
jxpath='/bbplayer/api.php'
|
||||||
|
data=self.session.post(f"{self.host}{jxpath}",data={'vid':jstr['url']}).json()['data']
|
||||||
|
if re.search(r'\.m3u8|\.mp4',data['url']):
|
||||||
|
url=data['url']
|
||||||
|
elif data['urlmode'] == 1:
|
||||||
|
url=self.decode1(data['url'])
|
||||||
|
elif data['urlmode'] == 2:
|
||||||
|
url=self.decode2(data['url'])
|
||||||
|
elif re.search(r'\.m3u8|\.mp4',jstr['url']):
|
||||||
|
url=jstr['url']
|
||||||
|
else:
|
||||||
|
url=None
|
||||||
|
if not url:raise Exception('未找到播放地址')
|
||||||
|
p,c=0,''
|
||||||
|
except Exception as e:
|
||||||
|
self.log(f"解析失败: {e}")
|
||||||
|
p,url,c=1,f"{self.host}{id}",'document.querySelector("#playleft iframe").contentWindow.document.querySelector("#start").click()'
|
||||||
|
return {'parse': p, 'url': url, 'header': {'User-Agent':'okhttp/3.12.1'},'click': c}
|
||||||
|
|
||||||
|
def localProxy(self, param):
|
||||||
|
wdict=json.loads(self.d64(param['wdict']))
|
||||||
|
url=f"{wdict['jx']}{wdict['id']}"
|
||||||
|
data=pq(self.fetch(url,headers=self.headers).text)
|
||||||
|
html=data('script').eq(-1).text()
|
||||||
|
url = re.search(r'src="(.*?)"', html).group(1)
|
||||||
|
return [302,'text/html',None,{'Location':url}]
|
||||||
|
|
||||||
|
def liveContent(self, url):
|
||||||
|
        pass

    def gethost(self):
        data=pq(self.fetch('https://www.jubaba.vip',headers=self.headers).text)
        hlist=list(data('.content-top ul li').items())[:2]
        hsots=[j('a').attr('href') for i in hlist for j in i('a').items()]
        return self.host_late(hsots)

    def host_late(self, urls):
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future_to_url = {
                executor.submit(self.test_host, url): url
                for url in urls
            }
            results = {}
            for future in concurrent.futures.as_completed(future_to_url):
                url = future_to_url[future]
                try:
                    results[url] = future.result()
                except Exception as e:
                    results[url] = float('inf')
        min_url = min(results.items(), key=lambda x: x[1])[0] if results else None
        if all(delay == float('inf') for delay in results.values()) or not min_url:
            return urls[0]
        return min_url

    def test_host(self, url):
        try:
            start_time = time.monotonic()
            response = requests.head(
                url,
                timeout=1.0,
                allow_redirects=False,
                headers=self.headers
            )
            response.raise_for_status()
            return (time.monotonic() - start_time) * 1000
        except Exception as e:
            print(f"测试{url}失败: {str(e)}")
            return float('inf')

    def getpq(self, path='',min=0,max=3):
        data = self.session.get(f"{self.host}{path}")
        data=data.text
        try:
            if '人机验证' in data:
                print(f"第{min}次尝试人机验证")
                jstr=pq(data)('script').eq(-1).html()
                token,tpath,stt=self.extract(jstr)
                body={'value':self.encrypt(self.host,stt),'token':self.encrypt(token,stt)}
                cd=self.session.post(f"{self.host}{tpath}",data=body)
                if min>max:raise Exception('人机验证失败')
                return self.getpq(path,min+1,max)
            return pq(data)
        except:
            return pq(data.encode('utf-8'))

    def encrypt(self, input_str,staticchars):
        encodechars = ""
        for char in input_str:
            num0 = staticchars.find(char)
            if num0 == -1:
                code = char
            else:
                code = staticchars[(num0 + 3) % 62]
            num1 = random.randint(0, 61)
            num2 = random.randint(0, 61)
            encodechars += staticchars[num1] + code + staticchars[num2]
        return self.e64(encodechars)

    def extract(self, js_code):
        token_match = re.search(r'var token = encrypt\("([^"]+)"\);', js_code)
        token_value = token_match.group(1) if token_match else None
        url_match = re.search(r'var url = \'([^\']+)\';', js_code)
        url_value = url_match.group(1) if url_match else None
        staticchars_match = re.search(r'var\s+staticchars\s*=\s*["\']([^"\']+)["\'];', js_code)
        staticchars = staticchars_match.group(1) if staticchars_match else None
        return token_value, url_value,staticchars

    def decode1(self, val):
        url = self._custom_str_decode(val)
        parts = url.split("/")
        result = "/".join(parts[2:])
        key1 = json.loads(self.d64(parts[1]))
        key2 = json.loads(self.d64(parts[0]))
        decoded = self.d64(result)
        return self._de_string(key1, key2, decoded)

    def _custom_str_decode(self, val):
        decoded = self.d64(val)
        key = self.md5("test")
        result = ""
        for i in range(len(decoded)):
            result += chr(ord(decoded[i]) ^ ord(key[i % len(key)]))
        return self.d64(result)

    def _de_string(self, key_array, value_array, input_str):
        result = ""
        for char in input_str:
            if re.match(r'^[a-zA-Z]$', char):
                if char in key_array:
                    index = key_array.index(char)
                    result += value_array[index]
                    continue
            result += char
        return result

    def decode2(self, url):
        key = "PXhw7UT1B0a9kQDKZsjIASmOezxYG4CHo5Jyfg2b8FLpEvRr3WtVnlqMidu6cN"
        url=self.d64(url)
        result = ""
        i = 1
        while i < len(url):
            try:
                index = key.find(url[i])
                if index == -1:
                    char = url[i]
                else:
                    char = key[(index + 59) % 62]
                result += char
            except IndexError:
                break
            i += 3
        return result

    def getlist(self, data):
        videos = []
        for k in data.items():
            j = k('.ewave-vodlist__thumb')
            h=k('.text-overflow a')
            if not h.attr('href'):h=j
            videos.append({
                'vod_id': re.findall(r'\d+', h.attr('href'))[0],
                'vod_name': j.attr('title'),
                'vod_pic': j.attr('data-original'),
                'vod_remarks': k('.pic-text').text(),
            })
        return videos

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64编码错误: {str(e)}")
            return ""

    def d64(self,encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64解码错误: {str(e)}")
            return ""

    def md5(self, text):
        h = MD5.new()
        h.update(text.encode('utf-8'))
        return h.hexdigest()
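Reviewer note, not part of the committed file: the token round-trip above works because the `+ 3` shift in encrypt() and the `+ 59` shift in decode2() are inverses over a 62-character alphabet (3 + 59 == 62), and the two random padding characters encrypt() adds per input character line up with decode2() reading every third character starting at index 1. A minimal sketch of the shift inverse, using a placeholder alphabet of the same length (the real one is the page-supplied staticchars):

# placeholder 62-character alphabet, standing in for staticchars
alphabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

def shift(text, offset):
    # shift each known character by `offset` positions within the alphabet
    return "".join(
        alphabet[(alphabet.find(c) + offset) % 62] if c in alphabet else c
        for c in text
    )

assert shift(shift("jubaba2024", 3), 59) == "jubaba2024"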
280
PY/html/嗷呜动漫.py
Normal file
@ -0,0 +1,280 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import colorsys
import random
import re
import sys
from base64 import b64decode, b64encode
from email.utils import unquote
from Crypto.Hash import MD5
sys.path.append("..")
import json
import time
from pyquery import PyQuery as pq
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    host='https://www.aowu.tv'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'pragma': 'no-cache',
        'cache-control': 'no-cache',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'dnt': '1',
        'upgrade-insecure-requests': '1',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-user': '?1',
        'sec-fetch-dest': 'document',
        'referer': f'{host}/',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'priority': 'u=0, i',
    }

    def homeContent(self, filter):
        data=self.getpq(self.fetch(self.host,headers=self.headers).text)
        result = {}
        classes = []
        ldata=data('.wrap.border-box.public-r .public-list-box')
        cd={"新番":"32","番剧":"20","剧场":"33"}
        for k,r in cd.items():
            classes.append({
                'type_name': k,
                'type_id': r,
            })
        videos=[]
        for i in ldata.items():
            j = i('.public-list-exp')
            k=i('.public-list-button')
            videos.append({
                'vod_id': j.attr('href').split('/')[-1].split('-')[0],
                'vod_name': k('.time-title').text(),
                'vod_pic': j('img').attr('data-src'),
                'vod_year': f"·{j('.public-list-prb').text()}",
                'vod_remarks': k('.public-list-subtitle').text(),
            })
        result['class'] = classes
        result['list']=videos
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {'type':tid,'class':'','area':'','lang':'','version':'','state':'','letter':'','page':pg}
        data = self.post(f"{self.host}/index.php/api/vod", headers=self.headers, data=self.getbody(body)).json()
        result = {}
        result['list'] = data['list']
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data = self.getpq(self.fetch(f"{self.host}/play/{ids[0]}-1-1.html", headers=self.headers).text)
        v=data('.player-info-text .this-text')
        vod = {
            'type_name': v.eq(-1)('a').text(),
            'vod_year': v.eq(1)('a').text(),
            'vod_remarks': v.eq(0).text(),
            'vod_actor': v.eq(2)('a').text(),
            'vod_content': data('.player-content').text()
        }
        ns=data('.swiper-wrapper .vod-playerUrl')
        ps=data('.player-list-box .anthology-list-box ul')
        play,names=[],[]
        for i in range(len(ns)):
            n=ns.eq(i)('a')
            n('span').remove()
            names.append(re.sub(r"[\ue679\xa0]", "", n.text()))
            play.append('#'.join([f"{v.text()}${v('a').attr('href')}" for v in ps.eq(i)('li').items()]))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        data = self.fetch(f"{self.host}/index.php/ajax/suggest?mid=1&wd={key}&limit=9999&timestamp={int(time.time()*1000)}", headers=self.headers).json()
        videos=[]
        for i in data['list']:
            videos.append({
                'vod_id': i['id'],
                'vod_name': i['name'],
                'vod_pic': i['pic']
            })
        return {'list':videos,'page':pg}

    def playerContent(self, flag, id, vipFlags):
        p,url1= 1,''
        yurl=f"{self.host}{id}"
        data = self.getpq(self.fetch(yurl, headers=self.headers).text)
        dmhtm=data('.ds-log-set')
        dmdata={'vod_id':dmhtm.attr('data-id'),'vod_ep':dmhtm.attr('data-nid')}
        try:
            jstr = data('.player-top.box.radius script').eq(0).text()
            jsdata = json.loads(jstr.split('=',1)[-1])
            url1= jsdata['url']
            data = self.fetch(f"{self.host}/player/?url={unquote(self.d64(jsdata['url']))}", headers=self.headers).text
            data=self.p_qjs(self.getjstr(data))
            url=data['qualities'] if len(data['qualities']) else data['url']
            p = 0
            if not url:raise Exception("未找到播放地址")
        except Exception as e:
            self.log(e)
            url = yurl
            if re.search(r'\.m3u8|\.mp4',url1):url=url1
        dmurl = f"{self.getProxyUrl()}&data={self.e64(json.dumps(dmdata))}&type=dm.xml"
        return {"parse": p, "url": url, "header": {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36'},'danmaku':dmurl}

    def localProxy(self, param):
        try:
            data = json.loads(self.d64(param['data']))
            headers = {
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
                'origin': self.host,
                'Content-Type': 'application/x-www-form-urlencoded'
            }
            params = {'vod_id': data['vod_id'], 'vod_ep': data['vod_ep']}
            res = self.post(f"https://app.wuyaoy.cn/danmu/api.php/getDanmu", headers=headers, data=params).json()
            danmustr = f'<?xml version="1.0" encoding="UTF-8"?>\n<i>\n\t<chatserver>chat.aowudm.com</chatserver>\n\t<chatid>88888888</chatid>\n\t<mission>0</mission>\n\t<maxlimit>99999</maxlimit>\n\t<state>0</state>\n\t<real_name>0</real_name>\n\t<source>k-v</source>\n'
            my_list = ['1', '4', '5', '6']
            for i in sorted(res['data'], key=lambda x: x['time']):
                dms = [str(i.get('time',1)), random.choice(my_list), '25', self.get_color(), '0']
                dmtxt = re.sub(r'[<>&\u0000\b]', '', self.cleanText(i.get('text', '')))
                tempdata = f'\t<d p="{",".join(dms)}">{dmtxt}</d>\n'
                danmustr += tempdata
            danmustr += '</i>'
            return [200,'text/xml',danmustr]
        except Exception as e:
            print(f"获取弹幕失败:{str(e)}")
            return ""

    def getbody(self, params):
        t=int(time.time())
        h = MD5.new()
        h.update(f"DS{t}DCC147D11943AF75".encode('utf-8'))
        key=h.hexdigest()
        params.update({'time':t,'key':key})
        return params

    def getpq(self, data):
        data=self.cleanText(data)
        try:
            return pq(data)
        except Exception as e:
            print(f"{str(e)}")
            return pq(data.encode('utf-8'))

    def get_color(self):
        h = random.random()
        s = random.uniform(0.7, 1.0)
        v = random.uniform(0.8, 1.0)
        r, g, b = colorsys.hsv_to_rgb(h, s, v)
        r = int(r * 255)
        g = int(g * 255)
        b = int(b * 255)
        decimal_color = (r << 16) + (g << 8) + b
        return str(decimal_color)

    def getjstr(self, data):
        pattern = r'new\s+Artplayer\s*\((\{[\s\S]*?\})\);'
        match = re.search(pattern, data)
        config_str = match.group(1) if match else '{}'

        replacements = [
            (r'contextmenu\s*:\s*\[[\s\S]*?\{[\s\S]*?\}[\s\S]*?\],', 'contextmenu: [],'),
            (r'customType\s*:\s*\{[\s\S]*?\},', 'customType: {},'),
            (r'plugins\s*:\s*\[\s*artplayerPluginDanmuku\(\{[\s\S]*?lockTime:\s*\d+,?\s*\}\)\,?\s*\]', 'plugins: []')
        ]
        for pattern, replacement in replacements:
            config_str = re.sub(pattern, replacement, config_str)
        return config_str

    def p_qjs(self, config_str):
        try:
            from com.whl.quickjs.wrapper import QuickJSContext
            ctx = QuickJSContext.create()
            js_code = f"""
            function extractVideoInfo() {{
                try {{
                    const config = {config_str};
                    const result = {{
                        url: "",
                        qualities: []
                    }};
                    if (config.url) {{
                        result.url = config.url;
                    }}
                    if (config.quality && Array.isArray(config.quality)) {{
                        config.quality.forEach(function(q) {{
                            if (q && q.url) {{
                                result.qualities.push(q.html || "嗷呜");
                                result.qualities.push(q.url);
                            }}
                        }});
                    }}

                    return JSON.stringify(result);
                }} catch (e) {{
                    return JSON.stringify({{
                        error: "解析错误: " + e.message,
                        url: "",
                        qualities: []
                    }});
                }}
            }}
            extractVideoInfo();
            """
            result_json = ctx.evaluate(js_code)
            ctx.destroy()
            return json.loads(result_json)

        except Exception as e:
            self.log(f"执行失败: {e}")
            return {
                "error": str(e),
                "url": "",
                "qualities": []
            }

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            return ""

    def d64(self,encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            return ""
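Reviewer note, not part of the committed file: a minimal sketch of how a TVBox-style host is expected to drive this class, assuming the base.spider.Spider runtime (fetch/post/cleanText/getProxyUrl) is available, which is only true on the device:

# method names and call order come from the class above; the runtime is assumed
sp = Spider()
sp.init()
home = sp.homeContent(True)                       # 新番/番剧/剧场 categories
page = sp.categoryContent('20', '1', True, {})    # '20' = 番剧
detail = sp.detailContent([page['list'][0]['vod_id']])
first_ep = detail['list'][0]['vod_play_url'].split('$$$')[0].split('#')[0].split('$')[-1]
play = sp.playerContent('', first_ep, [])         # returns url plus the 'danmaku' proxy link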
128
PY/html/好帅短剧.py
Normal file
@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import json
import sys
sys.path.append('..')
from base.spider import Spider
from pyquery import PyQuery as pq


class Spider(Spider):

    def init(self, extend=""):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    host='https://www.nhsyy.com'

    headers = {
        'Accept': '*/*',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'DNT': '1',
        'Origin': host,
        'Pragma': 'no-cache',
        'Referer': f'{host}/',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'cross-site',
        'User-Agent': 'Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="130", "Google Chrome";v="130"',
        'sec-ch-ua-mobile': '?1',
        'sec-ch-ua-platform': '"Android"',
    }

    def homeContent(self, filter):
        data = pq(self.fetch(self.host, headers=self.headers).text)
        result = {}
        classes = []
        for i in data('.drop-content-items li').items():
            j = i('a').attr('href')
            if j and 'type' in j:
                id = j.split('/')[-1].split('.')[0]
                classes.append({
                    'type_name': i('a').text(),
                    'type_id': id
                })
        hlist = self.getlist(data('.module-lines-list .module-item'))
        result['class'] = classes
        result['list'] = hlist
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        data = self.fetch(f'{self.host}/vodshwo/{tid}--------{pg}---.html', headers=self.headers).text
        vlist = self.getlist(pq(data)('.module-list .module-item'))
        return {"list": vlist, "page": pg, "pagecount": 9999, "limit": 90, "total": 999999}

    def detailContent(self, ids):
        data = pq(self.fetch(f"{self.host}{ids[0]}", headers=self.headers).text)
        udata = data('.scroll-box-y .scroll-content a')
        vdata = data('.video-info-main .video-info-item')
        vod = {
            'vod_year': vdata.eq(2)('div').text(),
            'vod_remarks': vdata.eq(3)('div').text(),
            'vod_actor': vdata.eq(1)('a').text(),
            'vod_director': vdata.eq(0)('a').text(),
            'type_name': data('.video-info-aux a').eq(0).attr('title'),
            'vod_content': vdata.eq(4)('p').eq(-1).text(),
            'vod_play_from': '嗷呜爱看短剧',
            'vod_play_url': '#'.join([f"{i.text()}${i.attr('href')}" for i in udata.items()]),
        }
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        dlist = self.fetch(f'{self.host}/vodsearch/{key}----------{pg}---.html', headers=self.headers).text
        ldata = pq(dlist)('.module-list .module-search-item')
        vlist = []
        for i in ldata.items():
            img = i('.module-item-pic')
            vlist.append({
                'vod_id': i('.video-serial').attr('href'),
                'vod_name': img('img').attr('alt'),
                'vod_pic': img('img').attr('data-src'),
                'vod_year': i('.tag-link a').eq(0).text(),
                'vod_remarks': i('.video-serial').text()
            })
        result = {"list": vlist, "page": pg}
        return result

    def playerContent(self, flag, id, vipFlags):
        data=self.fetch(f"{self.host}{id}", headers=self.headers).text
        jstr = pq(data)('.player-wrapper script').eq(0).text()
        try:
            jdata = json.loads(jstr.split('=', 1)[-1])
            url = jdata.get('url') or jdata.get('next_url')
            p=0
        except:
            url,p = f"{self.host}{id}",1
        return {'parse': p, 'url': url, 'header': self.headers}

    def localProxy(self, param):
        pass

    def getlist(self, data):
        vlist = []
        for i in data.items():
            img = i('.module-item-pic')
            vlist.append({
                'vod_id': img('a').attr('href'),
                'vod_name': img('img').attr('alt'),
                'vod_pic': img('img').attr('data-src'),
                'vod_remarks': i('.module-item-text').text()
            })
        return vlist
174
PY/html/小红影视.py
Normal file
@ -0,0 +1,174 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import re
import sys
from base64 import b64decode
from Crypto.Cipher import AES
from Crypto.Hash import MD5
from Crypto.Util.Padding import unpad
sys.path.append("..")
import json
import time
from pyquery import PyQuery as pq
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    host='https://www.xiaohys.com'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'sec-ch-ua-platform': '"macOS"',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'Origin': host,
        'Referer': f"{host}/",
    }

    def homeContent(self, filter):
        data=self.getpq(self.fetch(self.host,headers=self.headers).text)
        result = {}
        classes = []
        for k in data('.head-more.box a').items():
            i=k.attr('href')
            if i and '/show' in i:
                classes.append({
                    'type_name': k.text(),
                    'type_id': i.split('/')[-1]
                })
        result['class'] = classes
        result['list']=self.getlist(data('.border-box.diy-center .public-list-div'))
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {'type':tid,'class':'','area':'','lang':'','version':'','state':'','letter':'','page':pg}
        data = self.post(f"{self.host}/index.php/api/vod", headers=self.headers, data=self.getbody(body)).json()
        result = {}
        result['list'] = data['list']
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data = self.getpq(self.fetch(f"{self.host}/detail/{ids[0]}/", headers=self.headers).text)
        v=data('.detail-info.lightSpeedIn .slide-info')
        vod = {
            'vod_year': v.eq(-1).text(),
            'vod_remarks': v.eq(0).text(),
            'vod_actor': v.eq(3).text(),
            'vod_director': v.eq(2).text(),
            'vod_content': data('.switch-box #height_limit').text()
        }
        np=data('.anthology.wow.fadeInUp')
        ndata=np('.anthology-tab .swiper-wrapper .swiper-slide')
        pdata=np('.anthology-list .anthology-list-box ul')
        play,names=[],[]
        for i in range(len(ndata)):
            n=ndata.eq(i)('a')
            n('span').remove()
            names.append(n.text())
            vs=[]
            for v in pdata.eq(i)('li').items():
                vs.append(f"{v.text()}${v('a').attr('href')}")
            play.append('#'.join(vs))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        data = self.fetch(f"{self.host}/index.php/ajax/suggest?mid=1&wd={key}&limit=9999&timestamp={int(time.time()*1000)}", headers=self.headers).json()
        videos=[]
        for i in data['list']:
            videos.append({
                'vod_id': i['id'],
                'vod_name': i['name'],
                'vod_pic': i['pic']
            })
        return {'list':videos,'page':pg}

    def playerContent(self, flag, id, vipFlags):
        h,p,url1= {"User-Agent": "okhttp/3.14.9"},1,''
        url=f"{self.host}{id}"
        data = self.getpq(self.fetch(url, headers=self.headers).text)
        try:
            jstr = data('.player .player-left script').eq(0).text()
            jsdata = json.loads(jstr.split('=',1)[-1])
            body, url1= {'url': jsdata['url'],'referer':url},jsdata['url']
            data = self.post(f"{self.host}/static/player/artplayer/api.php?ac=getdate", headers=self.headers, data=body).json()
            l=self.aes(data['data'],data['iv'])
            url=l.get('url') or l['data'].get('url')
            p = 0
            if not url:raise Exception('未找到播放地址')
        except Exception as e:
            print('错误信息:',e)
            if re.search(r'\.m3u8|\.mp4',url1):url=url1
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = h
        return result

    def localProxy(self, param):
        pass

    def getbody(self, params):
        t=int(time.time())
        h = MD5.new()
        h.update(f"DS{t}DCC147D11943AF75".encode('utf-8'))
        key=h.hexdigest()
        params.update({'time':t,'key':key})
        return params

    def getlist(self,data):
        videos=[]
        for i in data.items():
            id = i('a').attr('href')
            if id:
                id = re.search(r'\d+', id).group(0)
            img = i('img').attr('data-src')
            if img and 'url=' in img and 'http' not in img: img = f'{self.host}{img}'
            videos.append({
                'vod_id': id,
                'vod_name': i('img').attr('alt'),
                'vod_pic': img,
                'vod_remarks': i('.public-prt').text() or i('.public-list-prb').text()
            })
        return videos

    def getpq(self, data):
        try:
            return pq(data)
        except Exception as e:
            print(f"{str(e)}")
            return pq(data.encode('utf-8'))

    def aes(self, text,iv):
        key = b"d978a93ffb4d3a00"
        iv = iv.encode("utf-8")
        cipher = AES.new(key, AES.MODE_CBC, iv)
        pt = unpad(cipher.decrypt(b64decode(text)), AES.block_size)
        return json.loads(pt.decode("utf-8"))
223
PY/html/旺旺.py
Normal file
File diff suppressed because one or more lines are too long
151
PY/html/甜圈短剧.py
Normal file
@ -0,0 +1,151 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import sys
sys.path.append('..')
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    ahost='https://api.cenguigui.cn'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'sec-ch-ua-platform': '"macOS"',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'DNT': '1',
        'sec-ch-ua-mobile': '?0',
        'Sec-Fetch-Site': 'cross-site',
        'Sec-Fetch-Mode': 'no-cors',
        'Sec-Fetch-Dest': 'video',
        'Sec-Fetch-Storage-Access': 'active',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    }

    def homeContent(self, filter):
        result = {'class': [{'type_id': '推荐榜', 'type_name': '🔥 推荐榜'},
                            {'type_id': '新剧', 'type_name': '🎬 新剧'},
                            {'type_id': '逆袭', 'type_name': '🎬 逆袭'},
                            {'type_id': '霸总', 'type_name': '🎬 霸总'},
                            {'type_id': '现代言情', 'type_name': '🎬 现代言情'},
                            {'type_id': '打脸虐渣', 'type_name': '🎬 打脸虐渣'},
                            {'type_id': '豪门恩怨', 'type_name': '🎬 豪门恩怨'},
                            {'type_id': '神豪', 'type_name': '🎬 神豪'},
                            {'type_id': '马甲', 'type_name': '🎬 马甲'},
                            {'type_id': '都市日常', 'type_name': '🎬 都市日常'},
                            {'type_id': '战神归来', 'type_name': '🎬 战神归来'},
                            {'type_id': '小人物', 'type_name': '🎬 小人物'},
                            {'type_id': '女性成长', 'type_name': '🎬 女性成长'},
                            {'type_id': '大女主', 'type_name': '🎬 大女主'},
                            {'type_id': '穿越', 'type_name': '🎬 穿越'},
                            {'type_id': '都市修仙', 'type_name': '🎬 都市修仙'},
                            {'type_id': '强者回归', 'type_name': '🎬 强者回归'},
                            {'type_id': '亲情', 'type_name': '🎬 亲情'},
                            {'type_id': '古装', 'type_name': '🎬 古装'},
                            {'type_id': '重生', 'type_name': '🎬 重生'},
                            {'type_id': '闪婚', 'type_name': '🎬 闪婚'},
                            {'type_id': '赘婿逆袭', 'type_name': '🎬 赘婿逆袭'},
                            {'type_id': '虐恋', 'type_name': '🎬 虐恋'},
                            {'type_id': '追妻', 'type_name': '🎬 追妻'},
                            {'type_id': '天下无敌', 'type_name': '🎬 天下无敌'},
                            {'type_id': '家庭伦理', 'type_name': '🎬 家庭伦理'},
                            {'type_id': '萌宝', 'type_name': '🎬 萌宝'},
                            {'type_id': '古风权谋', 'type_name': '🎬 古风权谋'},
                            {'type_id': '职场', 'type_name': '🎬 职场'},
                            {'type_id': '奇幻脑洞', 'type_name': '🎬 奇幻脑洞'},
                            {'type_id': '异能', 'type_name': '🎬 异能'},
                            {'type_id': '无敌神医', 'type_name': '🎬 无敌神医'},
                            {'type_id': '古风言情', 'type_name': '🎬 古风言情'},
                            {'type_id': '传承觉醒', 'type_name': '🎬 传承觉醒'},
                            {'type_id': '现言甜宠', 'type_name': '🎬 现言甜宠'},
                            {'type_id': '奇幻爱情', 'type_name': '🎬 奇幻爱情'},
                            {'type_id': '乡村', 'type_name': '🎬 乡村'},
                            {'type_id': '历史古代', 'type_name': '🎬 历史古代'},
                            {'type_id': '王妃', 'type_name': '🎬 王妃'},
                            {'type_id': '高手下山', 'type_name': '🎬 高手下山'},
                            {'type_id': '娱乐圈', 'type_name': '🎬 娱乐圈'},
                            {'type_id': '强强联合', 'type_name': '🎬 强强联合'},
                            {'type_id': '破镜重圆', 'type_name': '🎬 破镜重圆'},
                            {'type_id': '暗恋成真', 'type_name': '🎬 暗恋成真'},
                            {'type_id': '民国', 'type_name': '🎬 民国'},
                            {'type_id': '欢喜冤家', 'type_name': '🎬 欢喜冤家'},
                            {'type_id': '系统', 'type_name': '🎬 系统'},
                            {'type_id': '真假千金', 'type_name': '🎬 真假千金'},
                            {'type_id': '龙王', 'type_name': '🎬 龙王'},
                            {'type_id': '校园', 'type_name': '🎬 校园'},
                            {'type_id': '穿书', 'type_name': '🎬 穿书'},
                            {'type_id': '女帝', 'type_name': '🎬 女帝'},
                            {'type_id': '团宠', 'type_name': '🎬 团宠'},
                            {'type_id': '年代爱情', 'type_name': '🎬 年代爱情'},
                            {'type_id': '玄幻仙侠', 'type_name': '🎬 玄幻仙侠'},
                            {'type_id': '青梅竹马', 'type_name': '🎬 青梅竹马'},
                            {'type_id': '悬疑推理', 'type_name': '🎬 悬疑推理'},
                            {'type_id': '皇后', 'type_name': '🎬 皇后'},
                            {'type_id': '替身', 'type_name': '🎬 替身'},
                            {'type_id': '大叔', 'type_name': '🎬 大叔'},
                            {'type_id': '喜剧', 'type_name': '🎬 喜剧'},
                            {'type_id': '剧情', 'type_name': '🎬 剧情'}]}
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        params = {
            'classname': tid,
            'offset': str((int(pg) - 1)),
        }
        data = self.fetch(f'{self.ahost}/api/duanju/api.php', params=params, headers=self.headers).json()
        videos = []
        for k in data['data']:
            videos.append({
                'vod_id': k.get('book_id'),
                'vod_name': k.get('title'),
                'vod_pic': k.get('cover'),
                'vod_year': k.get('score'),
                'vod_remarks': f"{k.get('sub_title')}|{k.get('episode_cnt')}"
            })
        result = {}
        result['list'] = videos
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        v=self.fetch(f'{self.ahost}/api/duanju/api.php', params={'book_id': ids[0]}, headers=self.headers).json()
        vod = {
            'type_name': v.get('category'),
            'vod_year': v.get('time'),
            'vod_remarks': v.get('duration'),
            'vod_content': v.get('desc'),
            'vod_play_from': '嗷呜爱看短剧',
            'vod_play_url': '#'.join([f"{i['title']}${i['video_id']}" for i in v['data']])
        }
        return {'list':[vod]}

    def searchContent(self, key, quick, pg="1"):
        return self.categoryContent(key, pg, True, {})

    def playerContent(self, flag, id, vipFlags):
        data=self.fetch(f'{self.ahost}/api/duanju/api.php', params={'video_id': id}, headers=self.headers).json()
        return {'parse': 0, 'url': data['data']['url'], 'header': self.headers}

    def localProxy(self, param):
        pass
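Reviewer note, not part of the committed file: this source is a plain JSON API, so the three calls above can be exercised outside the spider for a quick smoke test. A sketch assuming the parameter names used above (classname/offset, book_id, video_id) are all the endpoint needs and the response shapes match the code:

import requests

base = 'https://api.cenguigui.cn/api/duanju/api.php'
cat = requests.get(base, params={'classname': '推荐榜', 'offset': '0'}).json()
book_id = cat['data'][0]['book_id']                      # category listing -> first title
detail = requests.get(base, params={'book_id': book_id}).json()
video_id = detail['data'][0]['video_id']                 # detail -> first episode
play_url = requests.get(base, params={'video_id': video_id}).json()['data']['url']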
127
PY/html/红果网页.py
Normal file
@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import re
import sys
from pyquery import PyQuery as pq
sys.path.append('..')
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    host='https://www.hongguodj.cc'

    headers = {
        'Accept': '*/*',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'DNT': '1',
        'Origin': host,
        'Pragma': 'no-cache',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'cross-site',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
    }

    def homeContent(self, filter):
        result = {}
        classes = []
        vlist = []
        data = pq(self.fetch(self.host, headers=self.headers).text)
        for i in list(data('.slip li').items())[1:]:
            classes.append({
                'type_name': i.text(),
                'type_id': re.findall(r'\d+', i('a').attr('href'))[0]
            })
        for i in data('.wrap .rows').items():
            vlist.extend(self.getlist(i('li')))
        result['class'] = classes
        result['list'] = vlist
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        data=pq(self.fetch(f'{self.host}/type/{tid}-{pg}.html', headers=self.headers).text)
        result = {}
        result['list'] = self.getlist(data('.list ul li'))
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data=pq(self.fetch(f'{self.host}{ids[0]}', headers=self.headers).text)
        v=data('.info')
        p=v('p')
        vod = {
            'vod_name': v('h1').text(),
            'type_name': p.eq(2).text(),
            'vod_year': p.eq(3).text(),
            'vod_area': p.eq(4).text(),
            'vod_remarks': v('em').text(),
            'vod_actor': p.eq(0).text(),
            'vod_director': p.eq(1).text(),
            'vod_content': data('#desc .text').text(),
            'vod_play_from': '',
            'vod_play_url': ''
        }
        names = [i.text() for i in data('.title.slip a').items()]
        plist=[]
        for i in data('.play-list ul').items():
            plist.append('#'.join([f'{j("a").text()}${j("a").attr("href")}' for j in i('li').items()]))
        vod['vod_play_from'] = '$$$'.join(names)
        vod['vod_play_url'] = '$$$'.join(plist)
        return {'list': [vod]}

    def searchContent(self, key, quick, pg="1"):
        data=pq(self.fetch(f'{self.host}/search/{key}----------{pg}---.html', headers=self.headers).text)
        return {'list': self.getlist(data('.show.rows li')),'page':pg}

    def playerContent(self, flag, id, vipFlags):
        p=0
        uid=f'{self.host}{id}'
        data=pq(self.fetch(uid, headers=self.headers).text)
        url=data('.video.ratio').attr('data-play')
        if not url:
            url = uid
            p = 1
        return {'parse': p, 'url': url, 'header': self.headers}

    def localProxy(self, param):
        pass

    def getlist(self,data):
        vlist = []
        for j in data.items():
            vlist.append({
                'vod_id': j('a').attr('href'),
                'vod_name': j('img').attr('alt'),
                'vod_pic': self.host + j('img').attr('data-src'),
                'vod_year': j('.bg').text(),
                'vod_remarks': j('p').text()
            })
        return vlist
147
PY/html/绝对影视.py
Normal file
@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import base64
import re
import sys
from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad
from pyquery import PyQuery as pq
sys.path.append('..')
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    host = 'https://www.jdys.art'

    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'sec-ch-ua-platform': '"macOS"',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="134", "Google Chrome";v="134"',
        'dnt': '1',
        'sec-ch-ua-mobile': '?0',
        'origin': host,
        'sec-fetch-site': 'cross-site',
        'sec-fetch-mode': 'cors',
        'sec-fetch-dest': 'empty',
        'referer': f'{host}/',
        'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'priority': 'u=1, i',
    }

    def homeContent(self, filter):
        data = self.getpq(self.fetch(self.host, headers=self.headers).text)
        result = {}
        classes = []
        for k in list(data('.navtop .navlist li').items())[:9]:
            classes.append({
                'type_name': k('a').text(),
                'type_id': k('a').attr('href'),
            })
        result['class'] = classes
        result['list'] = self.getlist(data('.mi_btcon .bt_img ul li'))
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        data = self.getpq(self.fetch(f"{tid}{'' if pg == '1' else f'page/{pg}/'}", headers=self.headers).text)
        result = {}
        result['list'] = self.getlist(data('.mi_cont .bt_img ul li'))
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data = self.getpq(self.fetch(ids[0], headers=self.headers).text)
        data2 = data('.moviedteail_list li')
        vod = {
            'vod_name': data('.dytext h1').text(),
            'type_name': data2.eq(0).text(),
            'vod_year': data2.eq(2).text(),
            'vod_area': data2.eq(1).text(),
            'vod_remarks': data2.eq(4).text(),
            'vod_actor': data2.eq(7).text(),
            'vod_director': data2.eq(5).text(),
            'vod_content': data('.yp_context').text().strip()
        }
        vdata = data('.paly_list_btn a')
        play = []
        for i in vdata.items():
            a = i.text() + "$" + i.attr.href
            play.append(a)
        vod["vod_play_from"] = "在线播放"
        vod["vod_play_url"] = "#".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        data = self.getpq(self.fetch(f"{self.host}/page/{pg}/?s={key}", headers=self.headers).text)
        return {'list': self.getlist(data('.mi_cont .bt_img ul li')), 'page': pg}

    def playerContent(self, flag, id, vipFlags):
        data = self.getpq(self.fetch(id, headers=self.headers).text)
        try:
            sc = data('.videoplay script').eq(-1).text()
            strd = re.findall(r'var\s+[^=]*=\s*"([^"]*)";', sc)
            kdata = re.findall(r'parse\((.*?)\);', sc)
            jm = self.aes(strd[0], kdata[0].replace('"', ''), kdata[1].replace('"', ''))
            url = re.search(r'url: "(.*?)"', jm).group(1)
            p = 0
        except:
            p = 1
            url = id
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = self.headers
        return result

    def localProxy(self, param):
        pass

    def getpq(self, text):
        try:
            return pq(text)
        except Exception as e:
            print(f"{str(e)}")
            return pq(text.encode('utf-8'))

    def getlist(self, data):
        videos = []
        for i in data.items():
            videos.append({
                'vod_id': i('a').attr('href'),
                'vod_name': i('a img').attr('alt'),
                'vod_pic': i('a img').attr('src'),
                'vod_remarks': i('.dycategory').text(),
                'vod_year': i('.dyplayinfo').text() or i('.rating').text(),
            })
        return videos

    def aes(self, word, key, iv):
        key = key.encode('utf-8')
        iv = iv.encode('utf-8')
        encrypted_data = base64.b64decode(word)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        decrypted_data = cipher.decrypt(encrypted_data)
        decrypted_data = unpad(decrypted_data, AES.block_size)
        return decrypted_data.decode('utf-8')
210
PY/html/金牌.py
Normal file
@ -0,0 +1,210 @@
# -*- coding: utf-8 -*-
# by @嗷呜
import json
import sys
import threading
import uuid
import requests
sys.path.append('..')
from base.spider import Spider
import time
from Crypto.Hash import MD5, SHA1


class Spider(Spider):

    def init(self, extend=""):
        if extend:
            hosts=json.loads(extend)['site']
            self.host = self.host_late(hosts)
        pass

    def getName(self):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def destroy(self):
        pass

    def homeContent(self, filter):
        cdata = self.fetch(f"{self.host}/api/mw-movie/anonymous/get/filer/type", headers=self.getheaders()).json()
        fdata = self.fetch(f"{self.host}/api/mw-movie/anonymous/v1/get/filer/list", headers=self.getheaders()).json()
        result = {}
        classes = []
        filters={}
        for k in cdata['data']:
            classes.append({
                'type_name': k['typeName'],
                'type_id': str(k['typeId']),
            })
        sort_values = [{"n": "最近更新", "v": "2"},{"n": "人气高低", "v": "3"}, {"n": "评分高低", "v": "4"}]
        for tid, d in fdata['data'].items():
            current_sort_values = sort_values.copy()
            if tid == '1':
                del current_sort_values[0]
            filters[tid] = [
                {"key": "type", "name": "类型",
                 "value": [{"n": i["itemText"], "v": i["itemValue"]} for i in d["typeList"]]},

                *([] if not d["plotList"] else [{"key": "v_class", "name": "剧情",
                                                 "value": [{"n": i["itemText"], "v": i["itemText"]}
                                                           for i in d["plotList"]]}]),

                {"key": "area", "name": "地区",
                 "value": [{"n": i["itemText"], "v": i["itemText"]} for i in d["districtList"]]},

                {"key": "year", "name": "年份",
                 "value": [{"n": i["itemText"], "v": i["itemText"]} for i in d["yearList"]]},

                {"key": "lang", "name": "语言",
                 "value": [{"n": i["itemText"], "v": i["itemText"]} for i in d["languageList"]]},

                {"key": "sort", "name": "排序", "value": current_sort_values}
            ]
        result['class'] = classes
        result['filters'] = filters
        return result

    def homeVideoContent(self):
        data1 = self.fetch(f"{self.host}/api/mw-movie/anonymous/v1/home/all/list", headers=self.getheaders()).json()
        data2=self.fetch(f"{self.host}/api/mw-movie/anonymous/home/hotSearch",headers=self.getheaders()).json()
        data=[]
        for i in data1['data'].values():
            data.extend(i['list'])
        data.extend(data2['data'])
        vods=self.getvod(data)
        return {'list':vods}

    def categoryContent(self, tid, pg, filter, extend):

        params = {
            "area": extend.get('area', ''),
            "filterStatus": "1",
            "lang": extend.get('lang', ''),
            "pageNum": pg,
            "pageSize": "30",
            "sort": extend.get('sort', '1'),
            "sortBy": "1",
            "type": extend.get('type', ''),
            "type1": tid,
            "v_class": extend.get('v_class', ''),
            "year": extend.get('year', '')
        }
        data = self.fetch(f"{self.host}/api/mw-movie/anonymous/video/list?{self.js(params)}", headers=self.getheaders(params)).json()
        result = {}
        result['list'] = self.getvod(data['data']['list'])
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        data=self.fetch(f"{self.host}/api/mw-movie/anonymous/video/detail?id={ids[0]}",headers=self.getheaders({'id':ids[0]})).json()
        vod=self.getvod([data['data']])[0]
        vod['vod_play_from']='雷蒙影视'
        vod['vod_play_url'] = '#'.join(
            f"{i['name'] if len(vod['episodelist']) > 1 else vod['vod_name']}${ids[0]}@@{i['nid']}" for i in
            vod['episodelist'])
        vod.pop('episodelist', None)
        return {'list':[vod]}

    def searchContent(self, key, quick, pg="1"):
        params = {
            "keyword": key,
            "pageNum": pg,
            "pageSize": "8",
            "sourceCode": "1"
        }
        data=self.fetch(f"{self.host}/api/mw-movie/anonymous/video/searchByWord?{self.js(params)}",headers=self.getheaders(params)).json()
        vods=self.getvod(data['data']['result']['list'])
        return {'list':vods,'page':pg}

    def playerContent(self, flag, id, vipFlags):
        self.header = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.6478.61 Chrome/126.0.6478.61 Not/A)Brand/8 Safari/537.36',
            'sec-ch-ua-platform': '"Windows"',
            'DNT': '1',
            'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="126", "Google Chrome";v="126"',
            'sec-ch-ua-mobile': '?0',
            'Origin': self.host,
            'Referer': f'{self.host}/'
        }
        ids=id.split('@@')
        pdata = self.fetch(f"{self.host}/api/mw-movie/anonymous/v2/video/episode/url?clientType=1&id={ids[0]}&nid={ids[1]}",headers=self.getheaders({'clientType':'1','id': ids[0], 'nid': ids[1]})).json()
        vlist=[]
        for i in pdata['data']['list']:vlist.extend([i['resolutionName'],i['url']])
        return {'parse':0,'url':vlist,'header':self.header}

    def localProxy(self, param):
        pass

    def host_late(self, url_list):
        if isinstance(url_list, str):
            urls = [u.strip() for u in url_list.split(',')]
        else:
            urls = url_list
        if len(urls) <= 1:
            return urls[0] if urls else ''

        results = {}
        threads = []

        def test_host(url):
            try:
                start_time = time.time()
                response = requests.head(url, timeout=1.0, allow_redirects=False)
                delay = (time.time() - start_time) * 1000
                results[url] = delay
            except Exception as e:
                results[url] = float('inf')
        for url in urls:
            t = threading.Thread(target=test_host, args=(url,))
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
        return min(results.items(), key=lambda x: x[1])[0]

    def md5(self, sign_key):
        md5_hash = MD5.new()
        md5_hash.update(sign_key.encode('utf-8'))
        md5_result = md5_hash.hexdigest()
        return md5_result

    def js(self, param):
        return '&'.join(f"{k}={v}" for k, v in param.items())

    def getheaders(self, param=None):
        if param is None:param = {}
        t=str(int(time.time()*1000))
        param['key']='cb808529bae6b6be45ecfab29a4889bc'
        param['t']=t
        sha1_hash = SHA1.new()
        sha1_hash.update(self.md5(self.js(param)).encode('utf-8'))
        sign = sha1_hash.hexdigest()
        deviceid = str(uuid.uuid4())
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.6478.61 Chrome/126.0.6478.61 Not/A)Brand/8 Safari/537.36',
            'Accept': 'application/json, text/plain, */*',
            'sign': sign,
            't': t,
            'deviceid':deviceid
        }
        return headers

    def convert_field_name(self, field):
        field = field.lower()
        if field.startswith('vod') and len(field) > 3:
            field = field.replace('vod', 'vod_')
        if field.startswith('type') and len(field) > 4:
            field = field.replace('type', 'type_')
        return field

    def getvod(self, array):
        return [{self.convert_field_name(k): v for k, v in item.items()} for item in array]
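Reviewer note, not part of the committed file: the sign header produced by getheaders() is a SHA1 over the hex MD5 of the query string (parameters joined in insertion order by js(), with the shared key and a millisecond timestamp appended). A standalone sketch of the same scheme, with the standard-library hashlib standing in for Crypto.Hash:

import time, hashlib

def sign_params(params, key='cb808529bae6b6be45ecfab29a4889bc'):
    # append key and t in the same order getheaders() does, then hash the query string
    t = str(int(time.time() * 1000))
    qs = '&'.join(f"{k}={v}" for k, v in {**params, 'key': key, 't': t}.items())
    sign = hashlib.sha1(hashlib.md5(qs.encode('utf-8')).hexdigest().encode('utf-8')).hexdigest()
    return {'sign': sign, 't': t}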