8 Commits

3 changed files with 122 additions and 35 deletions

2
.env
View File

@@ -3,7 +3,7 @@ WEBGUI_PORT=8081
# Optional Basic Auth (leave empty to disable)
BASIC_AUTH_USER=admin
BASIC_AUTH_PASS=change_me
BASIC_AUTH_PASS=123456
# Paths inside container
OUTPUT_DIR=/output

View File

@@ -18,7 +18,7 @@ from urllib.parse import urlparse
import paramiko
from fastapi import FastAPI, Form, Request
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
OUTPUT_DIR = os.environ.get("OUTPUT_DIR", "/output").rstrip("/")
@@ -96,7 +96,9 @@ class Job:
engine: str
library: str
proxy: str
proxy_forced: bool
headers: List[str]
progress: float
status: str
message: str
@@ -126,8 +128,7 @@ def pick_proxy(forced_proxy: str = "") -> str:
global _rr_idx
if forced_proxy:
return forced_proxy.strip() if proxy_is_usable(forced_proxy.strip()) else ""
with lock:
proxies = list(PROXIES)
proxies = snapshot_proxies()
if PROXY_MODE == "off" or not proxies:
return ""
if PROXY_MODE == "random":
@@ -162,7 +163,6 @@ def proxy_is_usable(proxy: str) -> bool:
return False
def format_proxy_lines(raw: str, scheme: str) -> str:
scheme = scheme.strip().lower()
if scheme not in {"socks5", "socks4", "http", "https"}:
@@ -221,6 +221,11 @@ def refresh_proxies() -> None:
PROXIES = updated
def snapshot_proxies() -> List[str]:
    """Return a point-in-time copy of the shared proxy list.

    The copy is taken while holding the module lock so callers can iterate
    it freely without racing concurrent refreshes of PROXIES.
    """
    with lock:
        current = PROXIES[:]
    return current
def proxy_refresh_loop(interval_seconds: int = 12 * 60 * 60) -> None:
while True:
try:
@@ -234,24 +239,6 @@ refresh_proxies()
threading.Thread(target=proxy_refresh_loop, daemon=True).start()
def parse_header_lines(raw: str) -> List[str]:
    """Parse user-supplied header text into normalized "Name: value" strings.

    Blank lines and lines beginning with '#' are skipped. Every remaining
    line must contain a colon separating a non-empty name from a non-empty
    value; otherwise a ValueError is raised.
    """
    parsed: List[str] = []
    for raw_line in (raw or "").splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        name, sep, value = stripped.partition(":")
        name, value = name.strip(), value.strip()
        # partition yields sep == "" when no colon is present
        if not sep or not name or not value:
            raise ValueError(f"Invalid header line: {stripped}")
        parsed.append(f"{name}: {value}")
    return parsed
def parse_header_lines(raw: str) -> List[str]:
headers = []
for line in (raw or "").splitlines():
@@ -281,20 +268,48 @@ def pick_engine(url: str, forced: str) -> str:
return "direct"
def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str):
cmd = ["yt-dlp", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url]
def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str, progress_cb):
    """Download *url* with yt-dlp, streaming percent progress to *progress_cb*.

    Args:
        url: Media URL passed through to yt-dlp.
        fmt: yt-dlp format selector (``-f``).
        out_dir: Directory for the output template ``%(title)s.%(ext)s``.
        proxy: Proxy URL forwarded via ``--proxy`` when non-empty.
        progress_cb: Called with a float percentage (0-100) for each
            ``[download]  NN.N%`` line yt-dlp prints.

    Raises:
        RuntimeError: if the child's stdout pipe is unavailable.
        subprocess.CalledProcessError: if yt-dlp exits non-zero.
    """
    cmd = ["yt-dlp", "--newline", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url]
    if proxy:
        cmd += ["--proxy", proxy]
    # BUG FIX: the pattern previously used doubled backslashes inside a raw
    # string (r"\\[download\\]\\s+([\\d.]+)%"), which matches a literal
    # backslash character and therefore never matches yt-dlp's
    # "[download]  42.3%" lines, so progress was never reported.
    progress_re = re.compile(r"\[download\]\s+([\d.]+)%")
    # Context manager ensures the stdout pipe is closed even if progress_cb
    # or the regex loop raises.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
        if not proc.stdout:
            raise RuntimeError("yt-dlp failed to start")
        for line in proc.stdout:
            match = progress_re.search(line)
            if match:
                progress_cb(float(match.group(1)))
        ret = proc.wait()
    if ret != 0:
        raise subprocess.CalledProcessError(ret, cmd)
def run_aria2(url: str, out_dir: str, proxy: str, headers: List[str] | None = None):
cmd = ["aria2c", "--dir", out_dir, "--allow-overwrite=true", "--auto-file-renaming=false", url]
def run_aria2(url: str, out_dir: str, proxy: str, progress_cb, headers: List[str] | None = None):
    """Download *url* with aria2c, streaming percent progress to *progress_cb*.

    Args:
        url: Download URL passed to aria2c.
        out_dir: Target directory (``--dir``).
        proxy: Proxy URL forwarded via ``--all-proxy`` when non-empty.
        progress_cb: Called with a float percentage (0-100) parsed from
            aria2c's per-second summary lines (``--summary-interval=1``),
            which contain a ``(NN%)`` readout.
        headers: Optional "Name: value" strings passed via ``--header``.

    Raises:
        RuntimeError: if the child's stdout pipe is unavailable.
        subprocess.CalledProcessError: if aria2c exits non-zero.
    """
    cmd = [
        "aria2c",
        "--dir",
        out_dir,
        "--allow-overwrite=true",
        "--auto-file-renaming=false",
        "--summary-interval=1",
        url,
    ]
    if proxy:
        cmd += ["--all-proxy", proxy]
    for header in headers or []:
        cmd += ["--header", header]
    # BUG FIX: the pattern previously used doubled backslashes inside a raw
    # string (r"\\((\\d+)%\\)"), which matches the literal text "\(42%\)"
    # and therefore never matches aria2c's "(42%)" readout, so progress was
    # never reported.
    percent_re = re.compile(r"\((\d+)%\)")
    # Context manager ensures the stdout pipe is closed even if progress_cb
    # or the regex loop raises.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
        if not proc.stdout:
            raise RuntimeError("aria2c failed to start")
        for line in proc.stdout:
            match = percent_re.search(line)
            if match:
                progress_cb(float(match.group(1)))
        ret = proc.wait()
    if ret != 0:
        raise subprocess.CalledProcessError(ret, cmd)
def md5_file(path: str) -> str:
@@ -404,18 +419,48 @@ def worker(jobid: str):
engine = pick_engine(job.url, job.engine)
proxy = job.proxy
headers = job.headers
proxy_forced = job.proxy_forced
proxy_candidates = []
with lock:
header_note = f"Headers={len(headers)}" if headers else "Headers=none"
job.status = "downloading"
job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}"
job.progress = 0.0
def update_progress(value: float):
with lock:
jobs[jobid].progress = max(0.0, min(100.0, value))
if engine == "ytdlp":
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy)
elif engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, proxy, headers=headers)
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy, update_progress)
else:
run_aria2(job.url, OUTPUT_DIR, proxy)
if proxy:
if proxy_forced:
proxy_candidates = [proxy]
else:
proxy_list = snapshot_proxies()
proxy_candidates = [proxy] + [p for p in proxy_list if p != proxy]
else:
proxy_candidates = [""]
for idx, candidate in enumerate(proxy_candidates):
try:
if engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, candidate, update_progress, headers=headers)
else:
run_aria2(job.url, OUTPUT_DIR, candidate, update_progress)
break
except subprocess.CalledProcessError as exc:
if idx == len(proxy_candidates) - 1:
raise
with lock:
job.message = f"Proxy failed, trying next ({idx + 1}/{len(proxy_candidates) - 1})"
except Exception as exc:
if idx == len(proxy_candidates) - 1:
raise
with lock:
job.message = f"Proxy failed, trying next ({idx + 1}/{len(proxy_candidates) - 1})"
new_files = list_output_files(before)
if not new_files:
@@ -459,6 +504,7 @@ def worker(jobid: str):
with lock:
job.status = "finished"
job.progress = 100.0
job.message = f"OK ({len(new_files)} file(s))"
except Exception as e:
@@ -488,6 +534,9 @@ def render_downloads(error: str = "") -> str:
f"<td>{j.engine}</td>"
f"<td>{j.library}</td>"
f"<td>{'none' if not j.proxy else j.proxy}</td>"
f"<td><div class='progress' data-jobid='{j.id}'>"
f"<div class='progress-bar' style='width:{j.progress:.1f}%'></div>"
f"<span class='progress-text'>{j.progress:.1f}%</span></div></td>"
f"<td><b>{j.status}</b><br/><small>{j.message}</small></td>"
f"</tr>"
)
@@ -544,11 +593,31 @@ def render_downloads(error: str = "") -> str:
</p>
<table>
<thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Status</th></tr></thead>
<thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Progress</th><th>Status</th></tr></thead>
<tbody>
{rows if rows else "<tr><td colspan='6'><em>No jobs yet.</em></td></tr>"}
{rows if rows else "<tr><td colspan='7'><em>No jobs yet.</em></td></tr>"}
</tbody>
</table>
<script>
async function refreshProgress() {{
try {{
const res = await fetch("/jobs");
const data = await res.json();
data.forEach((job) => {{
const el = document.querySelector(`.progress[data-jobid='${{job.id}}']`);
if (!el) return;
const bar = el.querySelector(".progress-bar");
const text = el.querySelector(".progress-text");
const pct = Math.max(0, Math.min(100, job.progress || 0));
bar.style.width = pct + "%";
text.textContent = pct.toFixed(1) + "%";
}});
}} catch (e) {{
console.warn("progress refresh failed", e);
}}
}}
setInterval(refreshProgress, 2000);
</script>
</body></html>
"""
@@ -558,6 +627,19 @@ def index():
return HTMLResponse(render_downloads())
@app.get("/jobs", response_class=JSONResponse)
def jobs_status():
    """Return a JSON snapshot of every job's id and progress percentage.

    Polled by the downloads page's refreshProgress() script to update the
    progress bars; the snapshot is taken under the module lock.
    """
    with lock:
        snapshot = [{"id": job.id, "progress": job.progress} for job in jobs.values()]
    return JSONResponse(snapshot)
@app.post("/submit")
def submit(
url: str = Form(...),
@@ -588,7 +670,9 @@ def submit(
engine=engine,
library=library,
proxy=chosen_proxy,
proxy_forced=bool(proxy.strip()),
headers=header_lines,
progress=0.0,
status="queued",
message="queued",
)

View File

@@ -11,3 +11,6 @@ th { background:#fbfbfb; text-align:left; }
code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; font-size: 12px; background:#f2f2f2; padding:2px 4px; border-radius:4px; }
.hint { color:#555; font-size: 12px; margin-top: 10px; }
.error { color:#b00020; font-weight: 700; }
.progress { position: relative; height: 18px; background: #eee; border-radius: 10px; overflow: hidden; min-width: 120px; }
.progress-bar { height: 100%; background: #4b7bec; transition: width 0.4s ease; }
.progress-text { position: absolute; inset: 0; display: flex; align-items: center; justify-content: center; font-size: 12px; color: #fff; font-weight: 600; text-shadow: 0 1px 2px rgba(0,0,0,0.35); }