9 Commits

3 changed files with 154 additions and 14 deletions

View File

@@ -23,6 +23,7 @@ Danach:
## Proxies ## Proxies
- Proxies werden **nur** an yt-dlp/aria2 übergeben (pro Job), beeinflussen also nicht SFTP/Jellyfin. - Proxies werden **nur** an yt-dlp/aria2 übergeben (pro Job), beeinflussen also nicht SFTP/Jellyfin.
- `PROXY_LIST` enthält eine Zeile pro Proxy: `socks5://IP:PORT`, `http://IP:PORT`, ... - `PROXY_LIST` enthält eine Zeile pro Proxy: `socks5://IP:PORT`, `http://IP:PORT`, ...
- Die Proxy-Listen werden 2× täglich aus den TheSpeedX-Quellen geladen und ins richtige Format gebracht.
## Hoster-Engine ## Hoster-Engine
- Engine `hoster` nutzt **aria2c** und akzeptiert zusätzliche HTTP-Header (z.B. `Cookie:` oder `User-Agent:`) im Formular. - Engine `hoster` nutzt **aria2c** und akzeptiert zusätzliche HTTP-Header (z.B. `Cookie:` oder `User-Agent:`) im Formular.

View File

@@ -7,16 +7,18 @@ import os
import random import random
import re import re
import shlex import shlex
import socket
import subprocess import subprocess
import threading import threading
import time import time
from dataclasses import dataclass from dataclasses import dataclass
from typing import Dict, List from typing import Dict, List
from urllib.request import urlopen from urllib.request import urlopen
from urllib.parse import urlparse
import paramiko import paramiko
from fastapi import FastAPI, Form, Request from fastapi import FastAPI, Form, Request
from fastapi.responses import HTMLResponse, RedirectResponse from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
OUTPUT_DIR = os.environ.get("OUTPUT_DIR", "/output").rstrip("/") OUTPUT_DIR = os.environ.get("OUTPUT_DIR", "/output").rstrip("/")
@@ -43,6 +45,7 @@ PROXY_SOURCES = {
"socks4": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt", "socks4": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt",
"http": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt", "http": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt",
} }
PROXY_CHECK_TIMEOUT = float(os.environ.get("PROXY_CHECK_TIMEOUT", "3.0"))
URL_RE = re.compile(r"^https?://", re.I) URL_RE = re.compile(r"^https?://", re.I)
YOUTUBE_RE = re.compile(r"(youtube\.com|youtu\.be)", re.I) YOUTUBE_RE = re.compile(r"(youtube\.com|youtu\.be)", re.I)
@@ -94,6 +97,7 @@ class Job:
library: str library: str
proxy: str proxy: str
headers: List[str] headers: List[str]
progress: float
status: str status: str
message: str message: str
@@ -122,14 +126,41 @@ def parse_proxy_list(raw: str) -> List[str]:
def pick_proxy(forced_proxy: str = "") -> str:
    """Choose a usable proxy URL for a job, or "" when none should be used.

    A caller-forced proxy is honored only if it passes the reachability
    check. Otherwise PROXY_MODE applies: "off" disables proxies, "random"
    tries candidates in shuffled order, anything else round-robins through
    the pool starting at the cursor left by the previous call.
    """
    global _rr_idx
    if forced_proxy:
        # Strip once and reuse — the original stripped twice (check + return).
        forced = forced_proxy.strip()
        return forced if proxy_is_usable(forced) else ""
    with lock:
        # Snapshot the pool so the background refresher can swap PROXIES
        # without racing this scan.
        proxies = list(PROXIES)
    if PROXY_MODE == "off" or not proxies:
        return ""
    if PROXY_MODE == "random":
        random.shuffle(proxies)
        for candidate in proxies:
            if proxy_is_usable(candidate):
                return candidate
        return ""
    # Round-robin: scan at most one full cycle looking for a reachable proxy.
    start_idx = _rr_idx % len(proxies)
    for offset in range(len(proxies)):
        idx = (start_idx + offset) % len(proxies)
        candidate = proxies[idx]
        if proxy_is_usable(candidate):
            _rr_idx = idx + 1
            return candidate
    return ""
def proxy_is_usable(proxy: str) -> bool:
    """Return True if a TCP connection to the proxy's host:port succeeds.

    Accepts bare "host:port" entries by assuming an http:// scheme so
    urlparse can extract the endpoint.
    """
    candidate = proxy.strip()
    if not candidate:
        return False
    target = candidate if "://" in candidate else f"http://{candidate}"
    parts = urlparse(target)
    if not (parts.hostname and parts.port):
        return False
    try:
        conn = socket.create_connection(
            (parts.hostname, parts.port), timeout=PROXY_CHECK_TIMEOUT
        )
    except OSError:
        return False
    conn.close()
    return True
def format_proxy_lines(raw: str, scheme: str) -> str: def format_proxy_lines(raw: str, scheme: str) -> str:
@@ -179,7 +210,28 @@ def load_proxy_sources() -> List[str]:
return parse_proxy_list(combined) return parse_proxy_list(combined)
PROXIES = parse_proxy_list("\n".join([PROXY_LIST_RAW, "\n".join(load_proxy_sources())])) PROXIES: List[str] = []
def refresh_proxies() -> None:
    """Rebuild the shared proxy pool from env config plus remote source lists."""
    global PROXIES
    source_lines = load_proxy_sources()
    merged_raw = "\n".join([PROXY_LIST_RAW, "\n".join(source_lines)])
    fresh = parse_proxy_list(merged_raw)
    with lock:
        PROXIES = fresh
def proxy_refresh_loop(interval_seconds: int = 12 * 60 * 60) -> None:
    """Background loop: refresh the proxy pool twice a day; never returns."""
    while True:
        try:
            refresh_proxies()
        except Exception as err:
            # Best-effort: log and keep the daemon refresher alive.
            print(f"Proxy refresh failed: {err}")
        time.sleep(interval_seconds)
# Prime the proxy pool synchronously at import time, then keep it fresh
# in a daemon thread (default interval per proxy_refresh_loop is 12h).
refresh_proxies()
threading.Thread(target=proxy_refresh_loop, daemon=True).start()
def parse_header_lines(raw: str) -> List[str]: def parse_header_lines(raw: str) -> List[str]:
@@ -211,20 +263,49 @@ def pick_engine(url: str, forced: str) -> str:
return "direct" return "direct"
def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str, progress_cb):
    """Download *url* with yt-dlp, reporting percent progress via progress_cb.

    --newline forces one progress line per update so they can be parsed.
    Raises RuntimeError if stdout could not be captured and
    CalledProcessError on a non-zero yt-dlp exit status.
    """
    cmd = ["yt-dlp", "--newline", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url]
    if proxy:
        cmd += ["--proxy", proxy]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
    if not proc.stdout:
        raise RuntimeError("yt-dlp failed to start")
    # BUGFIX: single backslashes in the raw string. The previous
    # r"\\[download\\]\\s+([\\d.]+)%" matched literal backslashes and never
    # hit yt-dlp's "[download]  12.3%" lines, so progress stayed at 0.
    progress_re = re.compile(r"\[download\]\s+([\d.]+)%")
    for line in proc.stdout:
        match = progress_re.search(line)
        if match:
            progress_cb(float(match.group(1)))
    ret = proc.wait()
    if ret != 0:
        raise subprocess.CalledProcessError(ret, cmd)
def run_aria2(url: str, out_dir: str, proxy: str, progress_cb, headers: List[str] | None = None):
    """Download *url* with aria2c, streaming percent progress to progress_cb.

    Extra HTTP headers (e.g. "Cookie: ...") are forwarded to aria2c.
    Raises RuntimeError if stdout could not be captured and
    CalledProcessError on a non-zero aria2c exit status.
    """
    cmd = [
        "aria2c",
        "--dir",
        out_dir,
        "--allow-overwrite=true",
        "--auto-file-renaming=false",
        "--summary-interval=1",  # 1s summaries so progress lines appear steadily
        url,
    ]
    if proxy:
        cmd += ["--all-proxy", proxy]
    for header in headers or []:
        cmd += ["--header", header]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
    if not proc.stdout:
        raise RuntimeError("aria2c failed to start")
    # BUGFIX: single backslashes in the raw string. The previous
    # r"\\((\\d+)%\\)" looked for literal backslashes and never matched
    # aria2c's "(42%)" summary output, so progress stayed at 0.
    percent_re = re.compile(r"\((\d+)%\)")
    for line in proc.stdout:
        match = percent_re.search(line)
        if match:
            progress_cb(float(match.group(1)))
    ret = proc.wait()
    if ret != 0:
        raise subprocess.CalledProcessError(ret, cmd)
def md5_file(path: str) -> str: def md5_file(path: str) -> str:
@@ -339,6 +420,18 @@ def worker(jobid: str):
header_note = f"Headers={len(headers)}" if headers else "Headers=none" header_note = f"Headers={len(headers)}" if headers else "Headers=none"
job.status = "downloading" job.status = "downloading"
job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}" job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}"
job.progress = 0.0
def update_progress(value: float):
with lock:
jobs[jobid].progress = max(0.0, min(100.0, value))
if engine == "ytdlp":
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy, update_progress)
elif engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, proxy, update_progress, headers=headers)
else:
run_aria2(job.url, OUTPUT_DIR, proxy, update_progress)
if engine == "ytdlp": if engine == "ytdlp":
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy) run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy)
NOTE(review): these two lines are unchanged diff context, which means the OLD engine dispatch survives the patch immediately after the new progress-aware dispatch added above. The resulting worker downloads every job twice, and this stale call now raises TypeError because run_ytdlp's signature gained a required progress_cb parameter. The old `if engine == ...` dispatch block must be deleted in this commit.
@@ -389,6 +482,7 @@ def worker(jobid: str):
with lock: with lock:
job.status = "finished" job.status = "finished"
job.progress = 100.0
job.message = f"OK ({len(new_files)} file(s))" job.message = f"OK ({len(new_files)} file(s))"
except Exception as e: except Exception as e:
@@ -418,6 +512,9 @@ def render_downloads(error: str = "") -> str:
f"<td>{j.engine}</td>" f"<td>{j.engine}</td>"
f"<td>{j.library}</td>" f"<td>{j.library}</td>"
f"<td>{'none' if not j.proxy else j.proxy}</td>" f"<td>{'none' if not j.proxy else j.proxy}</td>"
f"<td><div class='progress' data-jobid='{j.id}'>"
f"<div class='progress-bar' style='width:{j.progress:.1f}%'></div>"
f"<span class='progress-text'>{j.progress:.1f}%</span></div></td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>" f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>"
f"</tr>" f"</tr>"
) )
@@ -474,6 +571,31 @@ def render_downloads(error: str = "") -> str:
</p> </p>
<table> <table>
<thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Progress</th><th>Status</th></tr></thead>
<tbody>
{rows if rows else "<tr><td colspan='7'><em>No jobs yet.</em></td></tr>"}
</tbody>
</table>
<script>
async function refreshProgress() {{
try {{
const res = await fetch("/jobs");
const data = await res.json();
data.forEach((job) => {{
const el = document.querySelector(`.progress[data-jobid='${{job.id}}']`);
if (!el) return;
const bar = el.querySelector(".progress-bar");
const text = el.querySelector(".progress-text");
const pct = Math.max(0, Math.min(100, job.progress || 0));
bar.style.width = pct + "%";
text.textContent = pct.toFixed(1) + "%";
}});
}} catch (e) {{
console.warn("progress refresh failed", e);
}}
}}
setInterval(refreshProgress, 2000);
</script>
<thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Status</th></tr></thead> <thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Status</th></tr></thead>
<tbody> <tbody>
{rows if rows else "<tr><td colspan='6'><em>No jobs yet.</em></td></tr>"} {rows if rows else "<tr><td colspan='6'><em>No jobs yet.</em></td></tr>"}
NOTE(review): these lines are unchanged diff context — the old 6-column <thead>/<tbody> markup remains in the template after the new 7-column table and <script> block added above, so the rendered page contains the job table twice (the second copy without the Progress column). The old markup must be removed in this commit.
@@ -488,6 +610,19 @@ def index():
return HTMLResponse(render_downloads()) return HTMLResponse(render_downloads())
@app.get("/jobs", response_class=JSONResponse)
def jobs_status():
    """Return per-job download progress as JSON for the front-end poller."""
    with lock:
        snapshot = [{"id": j.id, "progress": j.progress} for j in jobs.values()]
    return JSONResponse(snapshot)
@app.post("/submit") @app.post("/submit")
def submit( def submit(
url: str = Form(...), url: str = Form(...),
@@ -519,6 +654,7 @@ def submit(
library=library, library=library,
proxy=chosen_proxy, proxy=chosen_proxy,
headers=header_lines, headers=header_lines,
progress=0.0,
status="queued", status="queued",
message="queued", message="queued",
) )

View File

@@ -11,3 +11,6 @@ th { background:#fbfbfb; text-align:left; }
code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; } code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; }
.hint { color:#555; font-size: 12px; margin-top: 10px; } .hint { color:#555; font-size: 12px; margin-top: 10px; }
.error { color:#b00020; font-weight: 700; } .error { color:#b00020; font-weight: 700; }
.progress { position: relative; height: 18px; background: #eee; border-radius: 10px; overflow: hidden; min-width: 120px; }
.progress-bar { height: 100%; background: #4b7bec; transition: width 0.4s ease; }
.progress-text { position: absolute; inset: 0; display: flex; align-items: center; justify-content: center; font-size: 12px; color: #fff; font-weight: 600; text-shadow: 0 1px 2px rgba(0,0,0,0.35); }