diff --git a/Dockerfile b/Dockerfile
index 78fa90462b..c564c6fd93 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -40,7 +40,7 @@ RUN echo "deb https://mirrors.aliyun.com/debian/ bookworm main contrib non-free
 deb-src https://mirrors.aliyun.com/debian-security/ bookworm-security main contrib non-free non-free-firmware\n" \
     > /etc/apt/sources.list
 
-RUN apt-get update && apt-get install -y --no-install-recommends cron
+RUN apt-get update && apt-get install -y --no-install-recommends cron ffmpeg
 
 RUN if [ "$LITE" = False ]; then apt-get install -y --no-install-recommends chromium chromium-driver; fi \
     && apt-get clean && rm -rf /var/lib/apt/lists/*
diff --git a/service/app.py b/service/app.py
index 6bcac75cdf..dae7095273 100644
--- a/service/app.py
+++ b/service/app.py
@@ -2,7 +2,7 @@ import sys
 
 sys.path.append(os.path.dirname(sys.path[0]))
 
-from flask import Flask, render_template_string
+from flask import Flask, send_from_directory, make_response
 from utils.tools import get_result_file_content, get_ip_address, resource_path
 from utils.config import config
 import utils.constants as constants
@@ -15,6 +15,12 @@ def show_index():
     return get_result_file_content()
 
 
+@app.route("/favicon.ico")
+def favicon():
+    return send_from_directory(resource_path('static/images'), 'favicon.ico',
+                               mimetype='image/vnd.microsoft.icon')
+
+
 @app.route("/txt")
 def show_txt():
     return get_result_file_content(file_type="txt")
@@ -38,10 +44,9 @@ def show_log():
             content = file.read()
     else:
         content = constants.waiting_tip
-    return render_template_string(
-        "<pre>{{ content }}</pre>",
-        content=content,
-    )
+    response = make_response(content)
+    response.mimetype = "text/plain"
+    return response
 
 
 def run_service():
diff --git a/utils/channel.py b/utils/channel.py
index 03b5c2dde6..0ce80940c4 100644
--- a/utils/channel.py
+++ b/utils/channel.py
@@ -576,7 +576,7 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
     need_sort_data = copy.deepcopy(data)
     process_nested_dict(need_sort_data, seen=set(), flag=r"cache:(.*)", force_str="!")
     result = {}
-    semaphore = asyncio.Semaphore(5)
+    semaphore = asyncio.Semaphore(10)
 
     async def limited_get_speed(info, ipv6_proxy, filter_resolution, timeout, callback):
         async with semaphore:
@@ -603,7 +603,7 @@ async def limited_get_speed(info, ipv6_proxy, filter_resolution, timeout, callba
     open_filter_speed = config.open_filter_speed
     open_filter_resolution = config.open_filter_resolution
     min_speed = config.min_speed
-    min_resolution = config.min_resolution
+    min_resolution = config.min_resolution_value
     for cate, obj in data.items():
         for name, info_list in obj.items():
             info_list = sort_urls(name, info_list, supply=open_supply, filter_speed=open_filter_speed,
@@ -645,6 +645,7 @@ def write_channel_to_file(data, ipv6=False, callback=None):
             )
             write_content_into_txt(f"🕘️更新时间,#genre#", path, newline=False)
             write_content_into_txt(f"{update_time},{update_time_url}", path)
+            write_content_into_txt("", path)
         for cate, channel_obj in data.items():
             print(f"\n{cate}:", end=" ")
             write_content_into_txt(f"{cate},#genre#", path)
diff --git a/utils/speed.py b/utils/speed.py
index 38a38bbf4e..1a486ff184 100644
--- a/utils/speed.py
+++ b/utils/speed.py
@@ -35,12 +35,12 @@ async def get_speed_with_download(url: str, session: ClientSession = None, timeo
             async for chunk in response.content.iter_any():
                 if chunk:
                     total_size += len(chunk)
-    except Exception as e:
+    except:
         pass
     finally:
-        end_time = time()
-        total_time += end_time - start_time
-        info['speed'] = (total_size / total_time if total_time > 0 else 0) / 1024 / 1024
+        if total_size > 0:
+            total_time += time() - start_time
+            info['speed'] = ((total_size / total_time) if total_time > 0 else 0) / 1024 / 1024
         if created_session:
             await session.close()
         return info
@@ -70,14 +70,12 @@ async def get_m3u8_headers(url: str, session: ClientSession = None, timeout: int
 
 def check_m3u8_valid(headers: CIMultiDictProxy[str] | dict[any, any]) -> bool:
     """
-    Check the m3u8 url is valid
+    Check if the m3u8 url is valid
     """
-    content_type = headers.get('Content-Type')
-    if content_type:
-        content_type = content_type.lower()
-        if 'application/vnd.apple.mpegurl' in content_type:
-            return True
-    return False
+    content_type = headers.get('Content-Type', '').lower()
+    if not content_type:
+        return False
+    return any(item in content_type for item in ['application/vnd.apple.mpegurl', 'audio/mpegurl', 'audio/x-mpegurl'])
 
 
 async def get_speed_m3u8(url: str, filter_resolution: bool = config.open_filter_resolution,
@@ -86,48 +84,47 @@ async def get_speed_m3u8(url: str, filter_resolution: bool = config.open_filter_
     Get the speed of the m3u8 url with a total timeout
     """
     info = {'speed': None, 'delay': None, 'resolution': None}
+    location = None
     try:
         url = quote(url, safe=':/?$&=@[]').partition('$')[0]
         async with ClientSession(connector=TCPConnector(ssl=False), trust_env=True) as session:
             headers = await get_m3u8_headers(url, session)
-            if check_m3u8_valid(headers):
-                location = headers.get('Location')
-                if location:
-                    info.update(await get_speed_m3u8(location, filter_resolution, timeout))
-                else:
-                    m3u8_obj = m3u8.load(url, timeout=2)
-                    playlists = m3u8_obj.data.get('playlists')
+            location = headers.get('Location')
+            if location:
+                info.update(await get_speed_m3u8(location, filter_resolution, timeout))
+            elif check_m3u8_valid(headers):
+                m3u8_obj = m3u8.load(url, timeout=2)
+                playlists = m3u8_obj.data.get('playlists')
+                segments = m3u8_obj.segments
+                if not segments and playlists:
+                    parsed_url = urlparse(url)
+                    uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path.rsplit('/', 1)[0]}/{playlists[0].get('uri', '')}"
+                    uri_headers = await get_m3u8_headers(uri, session)
+                    if not check_m3u8_valid(uri_headers):
+                        if uri_headers.get('Content-Length'):
+                            info.update(await get_speed_with_download(uri, session, timeout))
+                        raise Exception("Invalid m3u8")
+                    m3u8_obj = m3u8.load(uri, timeout=2)
                     segments = m3u8_obj.segments
-                    if not segments and playlists:
-                        parsed_url = urlparse(url)
-                        url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path.rsplit('/', 1)[0]}/{playlists[0].get('uri', '')}"
-                        uri_headers = await get_m3u8_headers(url, session)
-                        if not check_m3u8_valid(uri_headers):
-                            if uri_headers.get('Content-Length'):
-                                info.update(await get_speed_with_download(url, session, timeout))
-                            raise Exception("Invalid m3u8")
-                        m3u8_obj = m3u8.load(url, timeout=2)
-                        segments = m3u8_obj.segments
-                    if not segments:
-                        raise Exception("Segments not found")
-                    ts_urls = [segment.absolute_uri for segment in segments]
-                    speed_list = []
-                    start_time = time()
-                    for ts_url in ts_urls:
-                        if time() - start_time > timeout:
-                            break
-                        download_info = await get_speed_with_download(ts_url, session, timeout)
-                        speed_list.append(download_info['speed'])
-                        if info['delay'] is None and download_info['delay'] is not None:
-                            info['delay'] = download_info['delay']
-                    info['speed'] = sum(speed_list) / len(speed_list) if speed_list else 0
-                    url = ts_urls[0]
+                if not segments:
+                    raise Exception("Segments not found")
+                ts_urls = [segment.absolute_uri for segment in segments]
+                speed_list = []
+                start_time = time()
+                for ts_url in ts_urls:
+                    if time() - start_time > timeout:
+                        break
+                    download_info = await get_speed_with_download(ts_url, session, timeout)
+                    speed_list.append(download_info['speed'])
+                    if info['delay'] is None and download_info['delay'] is not None:
+                        info['delay'] = download_info['delay']
+                info['speed'] = (sum(speed_list) / len(speed_list)) if speed_list else 0
             elif headers.get('Content-Length'):
                 info.update(await get_speed_with_download(url, session, timeout))
     except:
         pass
     finally:
-        if filter_resolution:
+        if filter_resolution and not location and info['delay'] is not None:
             info['resolution'] = await get_resolution_ffprobe(url, timeout)
         return info
 
@@ -206,17 +203,19 @@ async def get_resolution_ffprobe(url: str, timeout: int = config.sort_timeout) -
     resolution = None
     proc = None
     try:
-        probe_args = ["ffprobe", "-show_format", "-show_streams", "-of", "json", url]
-        proc = await asyncio.create_subprocess_exec(
-            *probe_args,
-            stdout=asyncio.subprocess.PIPE,
-            stderr=asyncio.subprocess.PIPE
-        )
-        out, err = await asyncio.wait_for(proc.communicate(), timeout)
-        if proc.returncode != 0:
-            raise Exception("FFprobe failed")
-        video_stream = json.loads(out.decode("utf-8"))["streams"][0]
-        resolution = f"{int(video_stream['width'])}x{int(video_stream['height'])}"
+        probe_args = [
+            'ffprobe',
+            '-v', 'error',
+            '-select_streams', 'v:0',
+            '-show_entries', 'stream=width,height',
+            "-of", 'json',
+            url
+        ]
+        proc = await asyncio.create_subprocess_exec(*probe_args, stdout=asyncio.subprocess.PIPE,
+                                                    stderr=asyncio.subprocess.PIPE)
+        out, _ = await asyncio.wait_for(proc.communicate(), timeout)
+        video_stream = json.loads(out.decode('utf-8'))["streams"][0]
+        resolution = f"{video_stream['width']}x{video_stream['height']}"
     except:
         if proc:
             proc.kill()
@@ -298,7 +297,7 @@ async def get_speed(url, ipv6_proxy=None, filter_resolution=config.open_filter_r
 
 
 def sort_urls(name, data, supply=config.open_supply, filter_speed=config.open_filter_speed, min_speed=config.min_speed,
-              filter_resolution=config.open_filter_resolution, min_resolution=config.min_resolution,
+              filter_resolution=config.open_filter_resolution, min_resolution=config.min_resolution_value,
               logger=None):
     """
     Sort the urls with info
diff --git a/utils/tools.py b/utils/tools.py
index 2cde2baecf..136f2443f4 100644
--- a/utils/tools.py
+++ b/utils/tools.py
@@ -13,7 +13,7 @@
 
 import requests
 from bs4 import BeautifulSoup
-from flask import render_template_string, send_file
+from flask import send_file, make_response
 
 import utils.constants as constants
 from utils.config import config
@@ -406,10 +405,9 @@ def get_result_file_content(show_content=False, file_type=None):
             content = file.read()
     else:
         content = constants.waiting_tip
-    return render_template_string(
-        "<pre>{{ content }}</pre>",
-        content=content,
-    )
+    response = make_response(content)
+    response.mimetype = 'text/plain'
+    return response
 
 
 def remove_duplicates_from_tuple_list(tuple_list, seen, flag=None, force_str=None):