mirror of https://github.com/lalifeier/IPTV.git

init
0
iptv/__init__.py
Normal file
143
iptv/base.py
Normal file
@@ -0,0 +1,143 @@
import logging
import os
import re
from typing import List, Optional

import aiohttp
import cv2
from bs4 import BeautifulSoup

from iptv.config import IP_DIR, OUTPUT_DIR
from iptv.playwright import get_playwright

class Base:
    def __init__(self):
        self.ip_dir = IP_DIR
        self.output_dir = OUTPUT_DIR

    def sniff_ip(self):
        pass

    def generate_playlist(self):
        pass

    def save_ip(self, isp: str, region: str, ip: List[str]):
        if not ip:
            logging.warning(f"No validated IPs to save for {region}.")
            return

        output_dir = os.path.join(self.ip_dir, isp)
        os.makedirs(output_dir, exist_ok=True)
        output_path = os.path.join(output_dir, f"{region}.txt")

        # Merge with IPs saved on previous runs, de-duplicated and sorted.
        existing_ips = set()
        if os.path.exists(output_path):
            with open(output_path, "r", encoding="utf-8") as file:
                existing_ips = set(file.read().splitlines())

        all_ips = sorted(existing_ips.union(ip))
        with open(output_path, "w", encoding="utf-8") as file:
            file.write("\n".join(all_ips))
        logging.info(f"Saved IPs to: {output_path}")

    def get_ip(self, isp: str, region: str) -> List[str]:
        ip_file_path = os.path.join(self.ip_dir, isp, f"{region}.txt")

        if not os.path.exists(ip_file_path):
            logging.warning(f"IP file not found: {ip_file_path}. Skipping...")
            return []

        with open(ip_file_path, "r", encoding="utf-8") as f:
            ip = f.read().splitlines()

        return ip

    async def fetch_page_content(self, url: str) -> Optional[str]:
        logging.info(f"Fetching content from {url}")
        playwright = await get_playwright()
        browser = await playwright.chromium.launch(headless=True)
        context = await browser.new_context()
        # Mask the headless browser's webdriver flag so the page looks less automated.
        await context.add_init_script("Object.defineProperties(navigator, {webdriver:{get:()=>false}});")
        page = await context.new_page()

        try:
            await page.goto(url)
            await page.wait_for_load_state("domcontentloaded")

            # await page.locator('//span[@class="hsxa-host"]/a').first.wait_for()

            content = await page.content()
            logging.info(f"Finished fetching content from {url}")
            return content
        except Exception as e:
            logging.error(f"Error fetching page content from {url}: {e}")
            return None
        finally:
            await browser.close()

    async def extract_ip_from_content(self, content: str) -> List[str]:
        soup = BeautifulSoup(content, "html.parser")
        elements = soup.select("span.hsxa-host > a")

        def remove_protocol(url: str) -> str:
            return re.sub(r"^https?://", "", url)

        # Collect unique host:port values from the result links.
        values = set(remove_protocol(element.get("href", "")) for element in elements)
        return list(values)

    async def is_url_accessible(self, url: str) -> bool:
        async with aiohttp.ClientSession() as session:
            try:
                logging.info(f"Checking accessibility for URL: {url}")
                async with session.get(url, timeout=aiohttp.ClientTimeout(total=5)) as response:
                    if response.status == 200:
                        logging.info(f"URL {url} is accessible. Status code: {response.status}")
                        return True
                    else:
                        logging.warning(f"URL {url} is not accessible. Status code: {response.status}")
                        return False
            except Exception as e:
                logging.error(f"Error while checking URL {url}: {e}")
                return False

    def is_video_stream_valid(self, url: str) -> bool:
        logging.info(f"Checking video URL: {url}")

        cap = None
        try:
            cap = cv2.VideoCapture(url)
            if cap.isOpened():
                width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
                height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
                if width > 0 and height > 0:
                    logging.info(f"Valid video stream found (width={width}, height={height}) at {url}")
                    return True
                else:
                    logging.info(f"Invalid video stream (width={width}, height={height}) at {url}")
            else:
                logging.info(f"Failed to open video stream at {url}")
            return False
        except cv2.error as e:
            logging.error(f"Error checking video stream {url}: {e}")
            return False
        finally:
            # cap may never have been assigned if VideoCapture itself raised.
            if cap is not None:
                cap.release()

    def merge_playlist(self, output_dir: str, merged_file_path: str):
        try:
            with open(merged_file_path, "w", encoding="utf-8") as outfile:
                for subdir in os.listdir(output_dir):
                    subdir_path = os.path.join(output_dir, subdir)
                    if not os.path.isdir(subdir_path):
                        continue
                    logging.info(f"Processing directory: {subdir_path}")
                    for filename in os.listdir(subdir_path):
                        file_path = os.path.join(subdir_path, filename)
                        if os.path.isfile(file_path):
                            logging.info(f"Reading file: {file_path}")
                            with open(file_path, "r", encoding="utf-8") as infile:
                                outfile.write(infile.read() + "\n")
            logging.info(f"All files merged into {merged_file_path}")
        except Exception as e:
            logging.error(f"Error merging files: {e}")
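
The helpers above are meant to be composed by the ISP-specific scrapers that follow: render a FOFA result page with Playwright, pull host:port candidates out of the HTML, validate them, and persist the survivors. A minimal sketch of that flow, assuming a hypothetical subclass and a placeholder query (neither is part of this commit):

import asyncio

from iptv.base import Base


class DemoScraper(Base):
    """Illustrative subclass wiring the Base helpers together."""

    async def sniff_ip(self):
        # The qbase64 value here is a placeholder for a real FOFA query.
        content = await self.fetch_page_content("https://fofa.info/result?qbase64=...")
        if content:
            candidates = await self.extract_ip_from_content(content)
            self.save_ip("中国电信", "广东", candidates)


asyncio.run(DemoScraper().sniff_ip())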
44
iptv/config.py
Normal file
@@ -0,0 +1,44 @@
RTP_DIR = "rtp"
IP_DIR = "ip"
OUTPUT_DIR = "txt"


# Maps an ISP's display name to the org value FOFA uses for that network.
ISP_DICT = {
    "中国电信": "Chinanet",
    "中国联通": "CHINA UNICOM China169 Backbone",
    # "中国移动": "China Mobile Communications Corporation",
}

REGION_LIST = [
    "北京",
    "天津",
    "河北",
    "山西",
    "内蒙古",
    "辽宁",
    "吉林",
    "黑龙江",
    "上海",
    "江苏",
    "浙江",
    "安徽",
    "福建",
    "江西",
    "山东",
    "河南",
    "湖北",
    "湖南",
    "广东",
    "广西",
    "海南",
    "重庆",
    "四川",
    "贵州",
    "云南",
    "西藏",
    "陕西",
    "甘肃",
    "青海",
    "宁夏",
    "新疆",
]
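
These constants feed the FOFA queries in the modules below: a region name is transliterated with pypinyin before being dropped into the region/city filter, and the ISP_DICT value fills the org filter. A quick illustration of the transliteration step (not part of the commit):

from pypinyin import lazy_pinyin

# "广东" -> "guangdong", the value used as region="guangdong" in the query.
print("".join(lazy_pinyin("广东")))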
264
iptv/hotel.py
Normal file
@@ -0,0 +1,264 @@
import asyncio
import base64
import json
import logging
import os
import random
import re
from typing import List

import aiohttp
from pypinyin import lazy_pinyin

from iptv.base import Base
from iptv.config import IP_DIR, ISP_DICT, OUTPUT_DIR, REGION_LIST

logger = logging.getLogger(__name__)


def is_province(region):
    return region in REGION_LIST

def clean_name(name):
    # Normalize channel names; add further cleaning rules here as needed.
    # Note that the replacements are order-sensitive.
    name = name.replace("中央", "CCTV")
    name = name.replace("高清", "")
    name = name.replace("超清", "")
    name = name.replace("HD", "")
    name = name.replace("标清", "")
    name = name.replace("超高", "")
    name = name.replace("频道", "")
    name = name.replace("-", "")
    name = name.replace(" ", "")
    name = name.replace("PLUS", "+")
    name = name.replace("+", "+")
    name = name.replace("(", "")
    name = name.replace(")", "")
    name = name.replace("(", "")
    name = name.replace(")", "")
    name = name.replace("L", "")
    name = name.replace("CMIPTV", "")
    name = name.replace("cctv", "CCTV")
    name = re.sub(r"CCTV(\d+)台", r"CCTV\1", name)
    name = name.replace("CCTV1综合", "CCTV1")
    name = name.replace("CCTV2财经", "CCTV2")
    name = name.replace("CCTV3综艺", "CCTV3")
    name = name.replace("CCTV4国际", "CCTV4")
    name = name.replace("CCTV4中文国际", "CCTV4")
    name = name.replace("CCTV4欧洲", "CCTV4")
    name = name.replace("CCTV5体育", "CCTV5")
    name = name.replace("CCTV5+体育", "CCTV5+")
    name = name.replace("CCTV6电影", "CCTV6")
    name = name.replace("CCTV7军事", "CCTV7")
    name = name.replace("CCTV7军农", "CCTV7")
    name = name.replace("CCTV7农业", "CCTV7")
    name = name.replace("CCTV7国防军事", "CCTV7")
    name = name.replace("CCTV8电视剧", "CCTV8")
    name = name.replace("CCTV8纪录", "CCTV9")
    name = name.replace("CCTV9记录", "CCTV9")
    name = name.replace("CCTV9纪录", "CCTV9")
    name = name.replace("CCTV10科教", "CCTV10")
    name = name.replace("CCTV11戏曲", "CCTV11")
    name = name.replace("CCTV12社会与法", "CCTV12")
    name = name.replace("CCTV13新闻", "CCTV13")
    name = name.replace("CCTV新闻", "CCTV13")
    name = name.replace("CCTV14少儿", "CCTV14")
    name = name.replace("央视14少儿", "CCTV14")
    name = name.replace("CCTV少儿超", "CCTV14")
    name = name.replace("CCTV15音乐", "CCTV15")
    name = name.replace("CCTV音乐", "CCTV15")
    name = name.replace("CCTV16奥林匹克", "CCTV16")
    name = name.replace("CCTV17农业农村", "CCTV17")
    name = name.replace("CCTV17军农", "CCTV17")
    name = name.replace("CCTV17农业", "CCTV17")
    name = name.replace("CCTV5+体育赛视", "CCTV5+")
    name = name.replace("CCTV5+赛视", "CCTV5+")
    name = name.replace("CCTV5+体育赛事", "CCTV5+")
    name = name.replace("CCTV5+赛事", "CCTV5+")
    name = name.replace("CCTV5+体育", "CCTV5+")
    name = name.replace("CCTV5赛事", "CCTV5+")
    name = name.replace("凤凰中文台", "凤凰中文")
    name = name.replace("凤凰资讯台", "凤凰资讯")
    name = name.replace("CCTV4K测试)", "CCTV4")
    name = name.replace("CCTV164K", "CCTV16")
    name = name.replace("上海东方卫视", "上海卫视")
    name = name.replace("东方卫视", "上海卫视")
    name = name.replace("内蒙卫视", "内蒙古卫视")
    name = name.replace("福建东南卫视", "东南卫视")
    name = name.replace("广东南方卫视", "南方卫视")
    name = name.replace("金鹰卡通卫视", "金鹰卡通")
    name = name.replace("湖南金鹰卡通", "金鹰卡通")
    name = name.replace("炫动卡通", "哈哈炫动")
    name = name.replace("卡酷卡通", "卡酷少儿")
    name = name.replace("卡酷动画", "卡酷少儿")
    name = name.replace("BRTVKAKU少儿", "卡酷少儿")
    name = name.replace("优曼卡通", "优漫卡通")
    name = name.replace("嘉佳卡通", "佳嘉卡通")
    name = name.replace("世界地理", "地理世界")
    name = name.replace("CCTV世界地理", "地理世界")
    name = name.replace("BTV北京卫视", "北京卫视")
    name = name.replace("BTV冬奥纪实", "冬奥纪实")
    name = name.replace("东奥纪实", "冬奥纪实")
    name = name.replace("卫视台", "卫视")
    name = name.replace("湖南电视台", "湖南卫视")
    name = name.replace("2金鹰卡通", "金鹰卡通")
    name = name.replace("湖南教育台", "湖南教育")
    name = name.replace("湖南金鹰纪实", "金鹰纪实")
    name = name.replace("少儿科教", "少儿")
    name = name.replace("影视剧", "影视")
    return name
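

# Example (illustrative): clean_name("CCTV-5+ 体育赛事 高清") -> "CCTV5+"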


class Hotel(Base):
    def __init__(self):
        super().__init__()
        self.ip_dir = os.path.join(IP_DIR, "hotel")
        self.output_dir = os.path.join(OUTPUT_DIR, "hotel")

    def generate_search_url(self, region, isp_name, org_name):
        pinyin_name = "".join(lazy_pinyin(region, errors=lambda x: x))
        # Provinces are matched on FOFA's region field, everything else on city.
        if is_province(region):
            search_txt = f'"iptv/live/zh_cn.js" && country="CN" && region="{pinyin_name}" && org="{org_name}"'
        else:
            search_txt = f'"iptv/live/zh_cn.js" && country="CN" && city="{pinyin_name}" && org="{org_name}"'

        bytes_string = search_txt.encode("utf-8")
        encoded_search_txt = base64.b64encode(bytes_string).decode("utf-8")
        return f"https://fofa.info/result?qbase64={encoded_search_txt}"

    async def validate_ip(self, ip: List[str]) -> List[str]:
        if not ip:
            logging.warning("No valid IPs to validate.")
            return []

        validated_ip = []

        tasks = [
            self.is_url_accessible(f"http://{ip_address}/iptv/live/1000.json?key=txiptv")
            for ip_address in ip
        ]

        for ip_address, valid in zip(ip, await asyncio.gather(*tasks)):
            if valid:
                validated_ip.append(ip_address)

        logging.info(f"Validated {len(ip)} IPs. Found {len(validated_ip)} valid IPs.")
        return validated_ip

    async def sniff_ip(self):
        for region in REGION_LIST:
            for isp_name, org_name in ISP_DICT.items():
                url = self.generate_search_url(region, isp_name, org_name)
                content = await self.fetch_page_content(url)

                if not content:
                    logging.warning(f"Empty content for region {region}. Skipping...")
                    continue

                ip = await self.extract_ip_from_content(content)

                # ip_ports = set()
                # for ip_port in ip:
                #     ip_address, port = ip_port.split(":")
                #     for i in range(1, 256):  # scan the last octet from 1 to 255
                #         ip_ports.add(f"{ip_address.rsplit('.', 1)[0]}.{i}:{port}")

                validated_ips = await self.validate_ip(ip)

                self.save_ip(isp_name, region, validated_ips)

    async def _generate_playlist(self, ips) -> str:
        if not ips:
            return ""

        ip_playlists = {}

        async with aiohttp.ClientSession() as session:
            for ip in ips:
                url = f"http://{ip}/iptv/live/1000.json?key=txiptv"
                try:
                    async with session.get(url, timeout=aiohttp.ClientTimeout(total=3)) as response:
                        if response.status == 200:
                            json_data = await response.json()
                            programs = []

                            for item in json_data.get("data", []):
                                if isinstance(item, dict):
                                    name = item.get("name", "")
                                    chid = item.get("chid")
                                    srcid = item.get("srcid")
                                    if name and chid and srcid:
                                        name = clean_name(name)
                                        chid = str(chid).zfill(4)
                                        m3u8_url = f"http://{ip}/tsfile/live/{chid}_{srcid}.m3u8"
                                        programs.append((name, m3u8_url))

                            ip_playlists[ip] = programs
                            logging.info(f"Processed {len(programs)} programs from IP {ip}")

                except aiohttp.ClientError as e:
                    logging.error(f"Failed to fetch data from {url}. Error: {e}")
                except json.JSONDecodeError as e:
                    logging.error(f"Failed to parse JSON from {url}. Error: {e}")
                except Exception as e:
                    logging.error(f"Unexpected error occurred for URL {url}. Error: {e}")

        if not ip_playlists:
            return ""

        async def check_random_urls(urls):
            for url in urls:
                if self.is_video_stream_valid(url):
                    return True
            return False

        # Spot-check up to three random streams per IP and keep the IP that
        # serves the most channels among those that pass.
        best_ip = None
        best_count = 0

        for ip, programs in ip_playlists.items():
            sampled_urls = [url for _, url in random.sample(programs, min(len(programs), 3))]

            if await check_random_urls(sampled_urls):
                if len(programs) > best_count:
                    best_ip = ip
                    best_count = len(programs)

        if best_ip:
            best_playlist = "\n".join(f"{name},{url}" for name, url in ip_playlists[best_ip])
            return best_playlist

        return ""

    async def generate_playlist(self):
        # if os.path.exists(self.output_dir):
        #     shutil.rmtree(self.output_dir)

        os.makedirs(self.output_dir, exist_ok=True)
        for region in REGION_LIST:
            for isp_name, org_name in ISP_DICT.items():
                ip = self.get_ip(isp_name, region)

                if not ip:
                    logging.warning(f"No IP available for {region}. Skipping...")
                    continue

                playlists = await self._generate_playlist(ip)

                if not playlists:
                    continue

                output_dir = os.path.join(self.output_dir, isp_name)
                os.makedirs(output_dir, exist_ok=True)

                output_path = os.path.join(output_dir, f"{region}.txt")
                with open(output_path, "w", encoding="utf-8") as f:
                    f.write(f"{isp_name}-{region}-酒店,#genre#\n")
                    f.write(playlists)

                logging.info(f"Created playlist file: {output_path}")

        self.merge_playlist(self.output_dir, os.path.join(self.output_dir, "全国.txt"))
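
A plausible entry point for the hotel pipeline, run as a small async script (the main() wrapper below is illustrative, not part of this commit):

import asyncio
import logging

from iptv.hotel import Hotel


async def main():
    logging.basicConfig(level=logging.INFO)
    hotel = Hotel()
    await hotel.sniff_ip()           # harvest and validate candidate IPs per region/ISP
    await hotel.generate_playlist()  # probe each IP's channel JSON and write txt playlists


asyncio.run(main())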
17
iptv/playwright.py
Normal file
@@ -0,0 +1,17 @@
import logging
from typing import Optional

from playwright.async_api import Playwright, async_playwright

logger = logging.getLogger(__name__)

# One lazily started Playwright instance is shared by the whole process.
playwright: Optional[Playwright] = None


async def get_playwright():
    global playwright
    if playwright is None:
        try:
            playwright = await async_playwright().start()
        except Exception as e:
            logger.error(f"Error starting Playwright: {e}")
            raise
    return playwright
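
Nothing in this commit ever stops the shared instance, so a matching shutdown helper in the same module would be a natural companion (a sketch, not part of the commit):

async def close_playwright():
    """Stop the shared Playwright instance, if one was started."""
    global playwright
    if playwright is not None:
        await playwright.stop()
        playwright = None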
307
iptv/udpxy.py
Normal file
@@ -0,0 +1,307 @@
import asyncio
import base64
import logging
import os
import re
from typing import List, Optional

import aiohttp
from bs4 import BeautifulSoup
from pypinyin import lazy_pinyin

from iptv.base import Base
from iptv.config import IP_DIR, ISP_DICT, OUTPUT_DIR, REGION_LIST, RTP_DIR

logger = logging.getLogger(__name__)

class UDPxy(Base):
    def __init__(self):
        super().__init__()
        self.ip_dir = os.path.join(IP_DIR, "udpxy")
        self.output_dir = os.path.join(OUTPUT_DIR, "udpxy")

    def extract_mcast_from_file(self, file_path: str) -> Optional[str]:
        logging.info(f"Extracting mcast from file: {file_path}")

        try:
            with open(file_path, "r", encoding="utf-8") as f:
                file_content = f.read()
                # Take the first rtp://a.b.c.d:port address as the probe multicast group.
                rtp_match = re.search(
                    r"rtp://(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d+)", file_content
                )
                mcast = rtp_match.group(1) if rtp_match else None

        except FileNotFoundError:
            logging.warning(f"File not found: {file_path}")
            return None

        if mcast:
            logging.info(f"Found mcast: {mcast}")
        else:
            logging.warning(f"No mcast found in file: {file_path}")
        return mcast

    def generate_search_url(self, region: str, org_name: str) -> str:
        pinyin_name = "".join(lazy_pinyin(region, errors=lambda x: x))
        search_txt = (
            f'"udpxy" && country="CN" && region="{pinyin_name}" && org="{org_name}"'
        )
        # search_txt = f'"udpxy" && country="CN" && region="{pinyin_name}" && org="{org_name}" && is_domain=true'
        encoded_search_txt = base64.b64encode(search_txt.encode("utf-8")).decode(
            "utf-8"
        )
        return f"https://fofa.info/result?qbase64={encoded_search_txt}"

    async def validate_ip(self, ip: List[str], isp, region) -> List[str]:
        if not ip:
            logging.warning("No valid IPs to validate.")
            return []

        mcast = self.extract_mcast(isp, region)
        if not mcast:
            logging.warning(f"No rtp:// URL found in {isp} {region}. Skipping...")
            return []

        validated_ip = []

        async def validate_single_ip(ip_address: str) -> bool:
            # A working udpxy proxy must serve its status page and relay the
            # probe multicast group as a playable stream.
            url_status = f"http://{ip_address}/status"
            url_video = f"http://{ip_address}/rtp/{mcast}"
            return await self.is_url_accessible(
                url_status
            ) and self.is_video_stream_valid(url_video)

        tasks = [validate_single_ip(ip_address) for ip_address in ip]

        for ip_address, valid in zip(ip, await asyncio.gather(*tasks)):
            if valid:
                validated_ip.append(ip_address)

        logging.info(f"Validated {len(ip)} IPs. Found {len(validated_ip)} valid IPs.")
        return validated_ip

    def extract_mcast(self, isp, region):
        isp_dir = os.path.join(RTP_DIR, isp)
        file_path = os.path.join(isp_dir, f"{region}.txt")

        return self.extract_mcast_from_file(file_path)

    async def sniff_ip(self):
        for isp in os.listdir(RTP_DIR):
            isp_dir = os.path.join(RTP_DIR, isp)
            if not os.path.isdir(isp_dir):
                continue

            if isp not in ISP_DICT:
                logging.warning(f"Unknown ISP '{isp}'. Skipping...")
                continue

            org_name = ISP_DICT[isp]

            for filename in os.listdir(isp_dir):
                if not filename.endswith(".txt"):
                    continue

                region = filename.replace(".txt", "")

                url = self.generate_search_url(region, org_name)
                content = await self.fetch_page_content(url)

                if not content:
                    logging.warning(f"Empty content for region {region}. Skipping...")
                    continue

                ip = await self.extract_ip_from_content(content)

                validated_ips = await self.validate_ip(ip, isp, region)

                self.save_ip(isp, region, validated_ips)

    async def get_valid_ip(self, isp, region):
        mcast = self.extract_mcast(isp, region)
        if not mcast:
            logging.warning(f"No rtp:// URL found in {isp} {region}. Skipping...")
            return None

        ip_file_path = os.path.join(self.ip_dir, isp, f"{region}.txt")

        if not os.path.exists(ip_file_path):
            logging.warning(f"IP file not found: {ip_file_path}. Skipping...")
            return None

        with open(ip_file_path, "r", encoding="utf-8") as f:
            valid_ips = f.read().splitlines()

        if not valid_ips:
            logging.warning(f"No valid IP found in file: {ip_file_path}.")
            return None

        # Re-validate the saved IPs, keep the first one that still works, and
        # prune the ones that have gone dead from the file.
        invalid_ips = []
        valid_ip = None
        for ip in valid_ips:
            if await self.is_url_accessible(
                f"http://{ip}/status"
            ) and self.is_video_stream_valid(f"http://{ip}/udp/{mcast}"):
                valid_ip = ip
                break
            else:
                invalid_ips.append(ip)

        if invalid_ips:
            with open(ip_file_path, "w", encoding="utf-8") as f:
                f.write("\n".join([ip for ip in valid_ips if ip not in invalid_ips]))

        if valid_ip:
            return valid_ip

        logging.warning(f"No valid IP found after re-validation for {region}.")
        return None

    async def generate_playlist(self):
        for isp in os.listdir(RTP_DIR):
            isp_dir = os.path.join(RTP_DIR, isp)
            if not os.path.isdir(isp_dir):
                logging.warning(f"Directory not found: {isp_dir}. Skipping...")
                continue

            for filename in os.listdir(isp_dir):
                if not filename.endswith(".txt"):
                    continue

                region = filename.replace(".txt", "")

                ip = await self.get_valid_ip(isp, region)

                if not ip:
                    logging.warning(f"No valid IP available for {region}. Skipping...")
                    continue

                file_path = os.path.join(RTP_DIR, isp, f"{region}.txt")
                with open(file_path, "r", encoding="utf-8") as f:
                    content = f.read()

                # Rewrite every rtp:// multicast address to go through the proxy.
                playlists = content.replace("rtp://", f"http://{ip}/udp/")

                output_dir = os.path.join(self.output_dir, isp)
                os.makedirs(output_dir, exist_ok=True)

                output_path = os.path.join(output_dir, f"{region}.txt")
                with open(output_path, "w", encoding="utf-8") as f:
                    f.write(f"{isp}-{region}-组播,#genre#\n")
                    f.write(playlists)

                logging.info(f"Created playlist file: {output_path}")

        self.merge_playlist(
            self.output_dir, os.path.join(self.output_dir, "全国.txt")
        )

    async def fetch_ip(self, search):
        # "saerch" (sic) appears to be the literal form field name the site expects.
        form_data = {"saerch": search, "Submit": ""}
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
            "Referer": "http://tonkiang.us/hoteliptv.php",
            "Cache-Control": "no-cache",
            # "Content-Type": "application/x-www-form-urlencoded",
        }

        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(
                    "http://tonkiang.us/hoteliptv.php",
                    data=form_data,
                    headers=headers,
                ) as response:
                    if response.status != 200:
                        logger.error(
                            f"Failed to fetch IPs for search '{search}', status code: {response.status}"
                        )
                        return []

                    content = await response.text()
                    soup = BeautifulSoup(content, "html.parser")
                    elements = soup.select("div.channel > a")

                    ip_list = [element.text.strip() for element in elements]
                    logger.info(f"Fetched IPs for search '{search}': {len(ip_list)}")
                    return ip_list

        except Exception as e:
            logger.error(
                f"Exception occurred while fetching IPs for search '{search}': {e}"
            )
            return []

    async def get_rtp(self, ip):
        playlist = {}
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"http://tonkiang.us/alllist.php?s={ip}&c=false",
                    headers={
                        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3",
                        "Referer": "http://tonkiang.us/hoteliptv.php",
                        "Cache-Control": "no-cache",
                    },
                ) as response:
                    if response.status != 200:
                        logger.error(
                            f"Failed to get RTP for IP '{ip}', status code: {response.status}"
                        )
                        return {}

                    content = await response.text()
                    soup = BeautifulSoup(content, "html.parser")
                    elements = soup.select("div.tables")

                    for element in elements:
                        channel_elements = element.select("div.channel")
                        m3u8_elements = element.select("div.m3u8")

                        for channel, m3u8 in zip(channel_elements, m3u8_elements):
                            name = channel.text.strip()
                            url = m3u8.text.strip()

                            # Strip the proxy prefix back down to a raw rtp:// address.
                            rtp_url = re.sub(
                                r"https?://[^/]+/(udp|rtp)/", "rtp://", url
                            )
                            playlist[name] = rtp_url

                    logger.info(f"Playlist fetched for IP '{ip}': {len(playlist)}")
                    return playlist

        except Exception as e:
            logger.error(f"Exception occurred while getting RTP for IP '{ip}': {e}")
            return {}

    async def init_rtp(self):
        for region in REGION_LIST:
            for isp_name, org_name in ISP_DICT.items():
                # e.g. "广东" + "中国电信" -> "广东电信"
                search = region[0:2] + isp_name[2:]
                ips = await self.fetch_ip(search)

                if not ips:
                    logger.warning(f"No IPs found for search '{search}'")
                    continue

                validated_ips = await self.validate_ip(ips, isp_name, region)

                self.save_ip(isp_name, region, validated_ips)

                for ip in ips:
                    playlist = await self.get_rtp(ip)
                    if playlist:
                        output_dir = os.path.join(RTP_DIR, isp_name)
                        os.makedirs(output_dir, exist_ok=True)

                        output_path = os.path.join(output_dir, f"{region}.txt")
                        with open(output_path, "w", encoding="utf-8") as f:
                            for name, url in playlist.items():
                                f.write(f"{name},{url}\n")

                        logger.info(
                            f"Playlist for region '{region}' and ISP '{isp_name}' saved to '{output_path}'"
                        )
                        break
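
As with the hotel module, the udpxy flow is presumably driven from a small async entry point along these lines (illustrative, not part of this commit):

import asyncio

from iptv.udpxy import UDPxy


async def main():
    udpxy = UDPxy()
    await udpxy.init_rtp()           # seed rtp/<ISP>/<region>.txt templates from tonkiang.us
    await udpxy.sniff_ip()           # find udpxy proxies on FOFA and validate them
    await udpxy.generate_playlist()  # rewrite the rtp:// templates through a working proxy


asyncio.run(main())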