From 508b716b46282b4e54acb1bdb9363e78aff7d0e2 Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Sun, 4 Jan 2026 12:39:10 +0100 Subject: [PATCH 1/4] Add hoster engine with header support --- .env | 2 +- README.md | 5 + media-webgui/app.py | 413 +++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 413 insertions(+), 7 deletions(-) diff --git a/.env b/.env index a7cc20c..7ecc175 100644 --- a/.env +++ b/.env @@ -20,7 +20,7 @@ JELLYFIN_MOVIES_DIR=/jellyfin/Filme JELLYFIN_SERIES_DIR=/jellyfin/Serien # Engines -ENGINE_DEFAULT=auto # auto|ytdlp|direct +ENGINE_DEFAULT=auto # auto|ytdlp|direct|hoster YTDLP_FORMAT=bestvideo+bestaudio/best # Proxy pool (ONLY used for downloads; not for SFTP / webgui internal calls) diff --git a/README.md b/README.md index 4d4e83c..b357563 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ Dieses Projekt lädt Links über: - **yt-dlp** (YouTube & unterstützte Video-Plattformen) - **aria2c** (direkte HTTP/HTTPS-Links, z.B. .mkv/.mp4) +- **hoster** (aria2c mit optionalen HTTP-Headern, z.B. Cookies) Danach: - erzeugt es eine **MD5** und speichert sie als Sidecar @@ -22,3 +23,7 @@ Danach: ## Proxies - Proxies werden **nur** an yt-dlp/aria2 übergeben (pro Job), beeinflussen also nicht SFTP/Jellyfin. - `PROXY_LIST` enthält eine Zeile pro Proxy: `socks5://IP:PORT`, `http://IP:PORT`, ... + +## Hoster-Engine +- Engine `hoster` nutzt **aria2c** und akzeptiert zusätzliche HTTP-Header (z.B. `Cookie:` oder `User-Agent:`) im Formular. +- Ein Header pro Zeile. Leere Zeilen/Kommentare werden ignoriert. 
diff --git a/media-webgui/app.py b/media-webgui/app.py index b324d7c..3f79053 100644 --- a/media-webgui/app.py +++ b/media-webgui/app.py @@ -1,4 +1,3 @@ -+26-2 #!/usr/bin/env python3 from __future__ import annotations @@ -48,15 +47,17 @@ PROXY_SOURCES = { URL_RE = re.compile(r"^https?://", re.I) YOUTUBE_RE = re.compile(r"(youtube\.com|youtu\.be)", re.I) -VIDEO_EXTS = (".mkv",".mp4",".m4v",".avi",".mov",".wmv",".flv",".webm",".ts",".m2ts",".mpg",".mpeg",".vob",".ogv",".3gp",".3g2") +VIDEO_EXTS = (".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm", ".ts", ".m2ts", ".mpg", ".mpeg", ".vob", ".ogv", ".3gp", ".3g2") SERIES_RE = re.compile(r"(?:^|[^a-z0-9])S(\d{1,2})E(\d{1,2})(?:[^a-z0-9]|$)", re.IGNORECASE) app = FastAPI() app.mount("/static", StaticFiles(directory="static"), name="static") + def _auth_enabled() -> bool: return bool(BASIC_AUTH_USER and BASIC_AUTH_PASS) + def _check_basic_auth(req: Request) -> bool: if not _auth_enabled(): return True @@ -69,17 +70,39 @@ def _check_basic_auth(req: Request) -> bool: except Exception: return False if ":" not in raw: -@@ -82,88 +88,106 @@ class Job: + return False + user, pw = raw.split(":", 1) + return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS + + +def _auth_challenge() -> HTMLResponse: + return HTMLResponse("Authentication required", status_code=401, headers={"WWW-Authenticate": 'Basic realm="media-webgui"'}) + + +@app.middleware("http") +async def basic_auth_middleware(request: Request, call_next): + if not _check_basic_auth(request): + return _auth_challenge() + return await call_next(request) + + +@dataclass +class Job: + id: str + url: str engine: str library: str proxy: str + headers: List[str] status: str message: str + jobs: Dict[str, Job] = {} lock = threading.Lock() _rr_idx = 0 + def parse_proxy_list(raw: str) -> List[str]: out = [] for line in (raw or "").splitlines(): @@ -95,6 +118,7 @@ def parse_proxy_list(raw: str) -> List[str]: dedup.append(x) return dedup + def pick_proxy(forced_proxy: 
str = "") -> str: global _rr_idx if forced_proxy: @@ -107,6 +131,7 @@ def pick_proxy(forced_proxy: str = "") -> str: _rr_idx += 1 return p + def format_proxy_lines(raw: str, scheme: str) -> str: scheme = scheme.strip().lower() if scheme not in {"socks5", "socks4", "http", "https"}: @@ -125,16 +150,20 @@ def format_proxy_lines(raw: str, scheme: str) -> str: if not host or not port.isdigit(): continue out.append(f"{scheme}://{host}:{port}") - seen=set(); ded=[] + seen = set() + ded = [] for x in out: if x not in seen: - seen.add(x); ded.append(x) + seen.add(x) + ded.append(x) return "\n".join(ded) + def fetch_proxy_source(url: str) -> str: with urlopen(url, timeout=20) as resp: return resp.read().decode("utf-8", "replace") + def load_proxy_sources() -> List[str]: chunks = [] for scheme, url in PROXY_SOURCES.items(): @@ -149,8 +178,27 @@ def load_proxy_sources() -> List[str]: combined = "\n".join(chunks) return parse_proxy_list(combined) + PROXIES = parse_proxy_list("\n".join([PROXY_LIST_RAW, "\n".join(load_proxy_sources())])) + +def parse_header_lines(raw: str) -> List[str]: + headers = [] + for line in (raw or "").splitlines(): + s = line.strip() + if not s or s.startswith("#"): + continue + if ":" not in s: + raise ValueError(f"Invalid header line: {s}") + name, value = s.split(":", 1) + name = name.strip() + value = value.strip() + if not name or not value: + raise ValueError(f"Invalid header line: {s}") + headers.append(f"{name}: {value}") + return headers + + def pick_engine(url: str, forced: str) -> str: forced = (forced or "").strip().lower() if forced and forced != "auto": @@ -162,17 +210,370 @@ def pick_engine(url: str, forced: str) -> str: return "direct" return "direct" + def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str): cmd = ["yt-dlp", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url] if proxy: cmd += ["--proxy", proxy] subprocess.check_call(cmd) -def run_aria2(url: str, out_dir: str, proxy: str): + +def run_aria2(url: str, out_dir: str, 
proxy: str, headers: List[str] | None = None): cmd = ["aria2c", "--dir", out_dir, "--allow-overwrite=true", "--auto-file-renaming=false", url] if proxy: cmd += ["--all-proxy", proxy] + for header in headers or []: + cmd += ["--header", header] subprocess.check_call(cmd) + def md5_file(path: str) -> str: h = hashlib.md5() + with open(path, "rb") as f: + for chunk in iter(lambda: f.read(1024 * 1024), b""): + h.update(chunk) + return h.hexdigest() + + +def write_md5_sidecar(local_file: str, md5_hex: str) -> str: + os.makedirs(MD5_DIR, exist_ok=True) + base = os.path.basename(local_file) + md5p = os.path.join(MD5_DIR, base + ".md5") + with open(md5p, "w", encoding="utf-8") as f: + f.write(f"{md5_hex} {base}\n") + return md5p + + +def ssh_connect() -> paramiko.SSHClient: + if not JELLYFIN_USER: + raise RuntimeError("JELLYFIN_USER missing") + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect( + hostname=JELLYFIN_HOST, + port=JELLYFIN_PORT, + username=JELLYFIN_USER, + key_filename=JELLYFIN_SSH_KEY, + timeout=30, + ) + return ssh + + +def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str): + parts = [p for p in remote_dir.split("/") if p] + cur = "" + for p in parts: + cur += "/" + p + try: + sftp.stat(cur) + except IOError: + sftp.mkdir(cur) + + +def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str): + sftp = ssh.open_sftp() + try: + sftp_mkdirs(sftp, os.path.dirname(remote_path)) + sftp.put(local_path, remote_path) + except Exception as e: + raise RuntimeError(f"SFTP upload failed: local={local_path} remote={remote_path} error={e}") + finally: + sftp.close() + + +def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str: + cmd = f"md5sum {shlex.quote(remote_path)}" + _, stdout, stderr = ssh.exec_command(cmd, timeout=120) + out = stdout.read().decode("utf-8", "replace").strip() + err = stderr.read().decode("utf-8", "replace").strip() + if err and not out: + raise RuntimeError(f"Remote 
md5sum failed: {err}") + if not out: + raise RuntimeError("Remote md5sum returned empty output") + return out.split()[0] + + +def choose_target_dir(library: str, filename: str) -> str: + library = (library or "auto").lower() + if library == "series": + return JELLYFIN_SERIES_DIR + if library == "movies": + return JELLYFIN_MOVIES_DIR + if SERIES_RE.search(filename): + return JELLYFIN_SERIES_DIR + return JELLYFIN_MOVIES_DIR + + +def list_output_files(before: set) -> List[str]: + now = set() + for root, _, files in os.walk(OUTPUT_DIR): + for fn in files: + now.add(os.path.join(root, fn)) + new = [p for p in sorted(now) if p not in before] + final = [] + for p in new: + low = p.lower() + if low.endswith((".part", ".tmp", ".crdownload")): + continue + final.append(p) + return final + + +def worker(jobid: str): + try: + with lock: + job = jobs[jobid] + + os.makedirs(OUTPUT_DIR, exist_ok=True) + + before = set() + for root, _, files in os.walk(OUTPUT_DIR): + for fn in files: + before.add(os.path.join(root, fn)) + + engine = pick_engine(job.url, job.engine) + proxy = job.proxy + headers = job.headers + + with lock: + header_note = f"Headers={len(headers)}" if headers else "Headers=none" + job.status = "downloading" + job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}" + + if engine == "ytdlp": + run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy) + elif engine == "hoster": + run_aria2(job.url, OUTPUT_DIR, proxy, headers=headers) + else: + run_aria2(job.url, OUTPUT_DIR, proxy) + + new_files = list_output_files(before) + if not new_files: + raise RuntimeError("No output file detected in /output") + + ssh = ssh_connect() + try: + for f in new_files: + if not os.path.isfile(f): + continue + + md5_hex = md5_file(f) + md5_path = write_md5_sidecar(f, md5_hex) + + target_dir = choose_target_dir(job.library, os.path.basename(f)) + remote_file = f"{target_dir}/{os.path.basename(f)}" + remote_md5f = remote_file + ".md5" + + with lock: + job.status = 
"upload" + job.message = f"Uploading: {os.path.basename(f)} -> {remote_file}" + + sftp_upload(ssh, f, remote_file) + sftp_upload(ssh, md5_path, remote_md5f) + + remote_md5 = remote_md5sum(ssh, remote_file) + if remote_md5.lower() != md5_hex.lower(): + raise RuntimeError(f"MD5 mismatch: local={md5_hex} remote={remote_md5}") + + try: + os.remove(f) + except Exception: + pass + try: + os.remove(md5_path) + except Exception: + pass + + finally: + ssh.close() + + with lock: + job.status = "finished" + job.message = f"OK ({len(new_files)} file(s))" + + except Exception as e: + with lock: + jobs[jobid].status = "failed" + jobs[jobid].message = str(e) + + +def render_nav(active: str) -> str: + def link(label: str, href: str, key: str) -> str: + style = "font-weight:700;" if active == key else "" + return f"{label}" + + return "
" + link("Downloads", "/", "downloads") + link("Proxies", "/proxies", "proxies") + "
" + + +def render_downloads(error: str = "") -> str: + rows = "" + with lock: + job_list = list(jobs.values())[::-1] + + for j in job_list: + rows += ( + f"" + f"{j.id}" + f"{j.url}" + f"{j.engine}" + f"{j.library}" + f"{'none' if not j.proxy else j.proxy}" + f"{j.status}
{j.message}" + f"" + ) + + err_html = f"

{error}

" if error else "" + proxy_note = f"{len(PROXIES)} configured, mode={PROXY_MODE}" if PROXIES else "none configured" + return f""" + + + + Media WebGUI + + +

Media WebGUI

+ {render_nav("downloads")} + {err_html} + +
+

+ +
+ +

+ +
+ +

+ +
+ +

+ +
+ +

+ +
Nur für Engine hoster. Ein Header pro Zeile.
+
+ + +
+ +

+ Output: {OUTPUT_DIR} | MD5: {MD5_DIR} | Proxies: {proxy_note} +

+ + + + + {rows if rows else ""} + +
JobIDURLEngineLibraryProxyStatus
No jobs yet.
+ + """ + + +@app.get("/", response_class=HTMLResponse) +def index(): + return HTMLResponse(render_downloads()) + + +@app.post("/submit") +def submit( + url: str = Form(...), + engine: str = Form("auto"), + library: str = Form("auto"), + proxy: str = Form(""), + headers: str = Form(""), +): + url = url.strip() + if not URL_RE.match(url): + return HTMLResponse(render_downloads("Only http(s) URLs supported"), status_code=400) + + engine = (engine or ENGINE_DEFAULT).strip().lower() + library = (library or "auto").strip().lower() + + try: + header_lines = parse_header_lines(headers) + except ValueError as exc: + return HTMLResponse(render_downloads(str(exc)), status_code=400) + + chosen_proxy = pick_proxy(proxy.strip()) + jobid = str(int(time.time() * 1000)) + + with lock: + jobs[jobid] = Job( + id=jobid, + url=url, + engine=engine, + library=library, + proxy=chosen_proxy, + headers=header_lines, + status="queued", + message="queued", + ) + + t = threading.Thread(target=worker, args=(jobid,), daemon=True) + t.start() + return RedirectResponse(url="/", status_code=303) + + +def render_proxies_page(error: str = "", s5: str = "", s4: str = "", hp: str = "", out_text: str = "") -> str: + err_html = f"

{error}

" if error else "" + return f""" + + + + Proxies + + +

Media WebGUI

+ {render_nav("proxies")} + {err_html} + +
+

+ +
+

+ +
+

+ +
+ +
+ +

Import-Liste (zum Kopieren)

+

Keine Prüfung/Validierung. In .env in PROXY_LIST einfügen (eine Zeile pro Proxy).

+
+ + + """ + + +@app.get("/proxies", response_class=HTMLResponse) +def proxies_get(): + return HTMLResponse(render_proxies_page()) + + +@app.post("/proxies", response_class=HTMLResponse) +def proxies_post(s5: str = Form(""), s4: str = Form(""), hp: str = Form("")): + try: + o1 = format_proxy_lines(s5, "socks5") + o2 = format_proxy_lines(s4, "socks4") + o3 = format_proxy_lines(hp, "http") + combined = "\n".join([x for x in [o1, o2, o3] if x.strip()]) + return HTMLResponse(render_proxies_page(s5=s5, s4=s4, hp=hp, out_text=combined)) + except Exception as e: + return HTMLResponse(render_proxies_page(error=str(e), s5=s5, s4=s4, hp=hp, out_text=""), status_code=400) From 7755dc8b82e6c4b677971f42346833e2ab23c653 Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Sun, 4 Jan 2026 12:46:06 +0100 Subject: [PATCH 2/4] Prefix container name with jf-dl --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 4c50356..979f1d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ version: "3.8" services: media-webgui: build: ./media-webgui - container_name: media-webgui + container_name: jf-dl-media-webgui restart: unless-stopped ports: - "${WEBGUI_PORT:-8080}:8080" From 5c9c35d10d28882bb8d27bae7fec9993d531ac42 Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Sun, 4 Jan 2026 12:49:08 +0100 Subject: [PATCH 3/4] Change web UI port to 8081 --- .env | 4 +- README.md | 7 +- docker-compose.yml | 4 +- media-webgui/app.py | 413 +++++++++++++++++++++++++++++++++++++++++++- 4 files changed, 417 insertions(+), 11 deletions(-) diff --git a/.env b/.env index a7cc20c..06f7ba8 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ TZ=Europe/Berlin -WEBGUI_PORT=8080 +WEBGUI_PORT=8081 # Optional Basic Auth (leave empty to disable) BASIC_AUTH_USER=admin @@ -20,7 +20,7 @@ JELLYFIN_MOVIES_DIR=/jellyfin/Filme JELLYFIN_SERIES_DIR=/jellyfin/Serien # Engines -ENGINE_DEFAULT=auto # auto|ytdlp|direct +ENGINE_DEFAULT=auto # 
auto|ytdlp|direct|hoster YTDLP_FORMAT=bestvideo+bestaudio/best # Proxy pool (ONLY used for downloads; not for SFTP / webgui internal calls) diff --git a/README.md b/README.md index 4d4e83c..20bcaf8 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ Dieses Projekt lädt Links über: - **yt-dlp** (YouTube & unterstützte Video-Plattformen) - **aria2c** (direkte HTTP/HTTPS-Links, z.B. .mkv/.mp4) +- **hoster** (aria2c mit optionalen HTTP-Headern, z.B. Cookies) Danach: - erzeugt es eine **MD5** und speichert sie als Sidecar @@ -17,8 +18,12 @@ Danach: 1) `.env.example` -> `.env` kopieren und Werte setzen. 2) SSH Key ablegen: `data/ssh/id_ed25519` (chmod 600) 3) `docker compose up -d --build` -4) WebUI: `http://:8080` +4) WebUI: `http://:8081` ## Proxies - Proxies werden **nur** an yt-dlp/aria2 übergeben (pro Job), beeinflussen also nicht SFTP/Jellyfin. - `PROXY_LIST` enthält eine Zeile pro Proxy: `socks5://IP:PORT`, `http://IP:PORT`, ... + +## Hoster-Engine +- Engine `hoster` nutzt **aria2c** und akzeptiert zusätzliche HTTP-Header (z.B. `Cookie:` oder `User-Agent:`) im Formular. +- Ein Header pro Zeile. Leere Zeilen/Kommentare werden ignoriert. 
diff --git a/docker-compose.yml b/docker-compose.yml index 4c50356..f8b93cf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,10 +3,10 @@ version: "3.8" services: media-webgui: build: ./media-webgui - container_name: media-webgui + container_name: jf-dl-media-webgui restart: unless-stopped ports: - - "${WEBGUI_PORT:-8080}:8080" + - "${WEBGUI_PORT:-8081}:8080" env_file: - .env volumes: diff --git a/media-webgui/app.py b/media-webgui/app.py index b324d7c..3f79053 100644 --- a/media-webgui/app.py +++ b/media-webgui/app.py @@ -1,4 +1,3 @@ -+26-2 #!/usr/bin/env python3 from __future__ import annotations @@ -48,15 +47,17 @@ PROXY_SOURCES = { URL_RE = re.compile(r"^https?://", re.I) YOUTUBE_RE = re.compile(r"(youtube\.com|youtu\.be)", re.I) -VIDEO_EXTS = (".mkv",".mp4",".m4v",".avi",".mov",".wmv",".flv",".webm",".ts",".m2ts",".mpg",".mpeg",".vob",".ogv",".3gp",".3g2") +VIDEO_EXTS = (".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm", ".ts", ".m2ts", ".mpg", ".mpeg", ".vob", ".ogv", ".3gp", ".3g2") SERIES_RE = re.compile(r"(?:^|[^a-z0-9])S(\d{1,2})E(\d{1,2})(?:[^a-z0-9]|$)", re.IGNORECASE) app = FastAPI() app.mount("/static", StaticFiles(directory="static"), name="static") + def _auth_enabled() -> bool: return bool(BASIC_AUTH_USER and BASIC_AUTH_PASS) + def _check_basic_auth(req: Request) -> bool: if not _auth_enabled(): return True @@ -69,17 +70,39 @@ def _check_basic_auth(req: Request) -> bool: except Exception: return False if ":" not in raw: -@@ -82,88 +88,106 @@ class Job: + return False + user, pw = raw.split(":", 1) + return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS + + +def _auth_challenge() -> HTMLResponse: + return HTMLResponse("Authentication required", status_code=401, headers={"WWW-Authenticate": 'Basic realm="media-webgui"'}) + + +@app.middleware("http") +async def basic_auth_middleware(request: Request, call_next): + if not _check_basic_auth(request): + return _auth_challenge() + return await call_next(request) + + 
+@dataclass +class Job: + id: str + url: str engine: str library: str proxy: str + headers: List[str] status: str message: str + jobs: Dict[str, Job] = {} lock = threading.Lock() _rr_idx = 0 + def parse_proxy_list(raw: str) -> List[str]: out = [] for line in (raw or "").splitlines(): @@ -95,6 +118,7 @@ def parse_proxy_list(raw: str) -> List[str]: dedup.append(x) return dedup + def pick_proxy(forced_proxy: str = "") -> str: global _rr_idx if forced_proxy: @@ -107,6 +131,7 @@ def pick_proxy(forced_proxy: str = "") -> str: _rr_idx += 1 return p + def format_proxy_lines(raw: str, scheme: str) -> str: scheme = scheme.strip().lower() if scheme not in {"socks5", "socks4", "http", "https"}: @@ -125,16 +150,20 @@ def format_proxy_lines(raw: str, scheme: str) -> str: if not host or not port.isdigit(): continue out.append(f"{scheme}://{host}:{port}") - seen=set(); ded=[] + seen = set() + ded = [] for x in out: if x not in seen: - seen.add(x); ded.append(x) + seen.add(x) + ded.append(x) return "\n".join(ded) + def fetch_proxy_source(url: str) -> str: with urlopen(url, timeout=20) as resp: return resp.read().decode("utf-8", "replace") + def load_proxy_sources() -> List[str]: chunks = [] for scheme, url in PROXY_SOURCES.items(): @@ -149,8 +178,27 @@ def load_proxy_sources() -> List[str]: combined = "\n".join(chunks) return parse_proxy_list(combined) + PROXIES = parse_proxy_list("\n".join([PROXY_LIST_RAW, "\n".join(load_proxy_sources())])) + +def parse_header_lines(raw: str) -> List[str]: + headers = [] + for line in (raw or "").splitlines(): + s = line.strip() + if not s or s.startswith("#"): + continue + if ":" not in s: + raise ValueError(f"Invalid header line: {s}") + name, value = s.split(":", 1) + name = name.strip() + value = value.strip() + if not name or not value: + raise ValueError(f"Invalid header line: {s}") + headers.append(f"{name}: {value}") + return headers + + def pick_engine(url: str, forced: str) -> str: forced = (forced or "").strip().lower() if forced and 
forced != "auto": @@ -162,17 +210,370 @@ def pick_engine(url: str, forced: str) -> str: return "direct" return "direct" + def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str): cmd = ["yt-dlp", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url] if proxy: cmd += ["--proxy", proxy] subprocess.check_call(cmd) -def run_aria2(url: str, out_dir: str, proxy: str): + +def run_aria2(url: str, out_dir: str, proxy: str, headers: List[str] | None = None): cmd = ["aria2c", "--dir", out_dir, "--allow-overwrite=true", "--auto-file-renaming=false", url] if proxy: cmd += ["--all-proxy", proxy] + for header in headers or []: + cmd += ["--header", header] subprocess.check_call(cmd) + def md5_file(path: str) -> str: h = hashlib.md5() + with open(path, "rb") as f: + for chunk in iter(lambda: f.read(1024 * 1024), b""): + h.update(chunk) + return h.hexdigest() + + +def write_md5_sidecar(local_file: str, md5_hex: str) -> str: + os.makedirs(MD5_DIR, exist_ok=True) + base = os.path.basename(local_file) + md5p = os.path.join(MD5_DIR, base + ".md5") + with open(md5p, "w", encoding="utf-8") as f: + f.write(f"{md5_hex} {base}\n") + return md5p + + +def ssh_connect() -> paramiko.SSHClient: + if not JELLYFIN_USER: + raise RuntimeError("JELLYFIN_USER missing") + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect( + hostname=JELLYFIN_HOST, + port=JELLYFIN_PORT, + username=JELLYFIN_USER, + key_filename=JELLYFIN_SSH_KEY, + timeout=30, + ) + return ssh + + +def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str): + parts = [p for p in remote_dir.split("/") if p] + cur = "" + for p in parts: + cur += "/" + p + try: + sftp.stat(cur) + except IOError: + sftp.mkdir(cur) + + +def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str): + sftp = ssh.open_sftp() + try: + sftp_mkdirs(sftp, os.path.dirname(remote_path)) + sftp.put(local_path, remote_path) + except Exception as e: + raise RuntimeError(f"SFTP upload failed: 
local={local_path} remote={remote_path} error={e}") + finally: + sftp.close() + + +def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str: + cmd = f"md5sum {shlex.quote(remote_path)}" + _, stdout, stderr = ssh.exec_command(cmd, timeout=120) + out = stdout.read().decode("utf-8", "replace").strip() + err = stderr.read().decode("utf-8", "replace").strip() + if err and not out: + raise RuntimeError(f"Remote md5sum failed: {err}") + if not out: + raise RuntimeError("Remote md5sum returned empty output") + return out.split()[0] + + +def choose_target_dir(library: str, filename: str) -> str: + library = (library or "auto").lower() + if library == "series": + return JELLYFIN_SERIES_DIR + if library == "movies": + return JELLYFIN_MOVIES_DIR + if SERIES_RE.search(filename): + return JELLYFIN_SERIES_DIR + return JELLYFIN_MOVIES_DIR + + +def list_output_files(before: set) -> List[str]: + now = set() + for root, _, files in os.walk(OUTPUT_DIR): + for fn in files: + now.add(os.path.join(root, fn)) + new = [p for p in sorted(now) if p not in before] + final = [] + for p in new: + low = p.lower() + if low.endswith((".part", ".tmp", ".crdownload")): + continue + final.append(p) + return final + + +def worker(jobid: str): + try: + with lock: + job = jobs[jobid] + + os.makedirs(OUTPUT_DIR, exist_ok=True) + + before = set() + for root, _, files in os.walk(OUTPUT_DIR): + for fn in files: + before.add(os.path.join(root, fn)) + + engine = pick_engine(job.url, job.engine) + proxy = job.proxy + headers = job.headers + + with lock: + header_note = f"Headers={len(headers)}" if headers else "Headers=none" + job.status = "downloading" + job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}" + + if engine == "ytdlp": + run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy) + elif engine == "hoster": + run_aria2(job.url, OUTPUT_DIR, proxy, headers=headers) + else: + run_aria2(job.url, OUTPUT_DIR, proxy) + + new_files = list_output_files(before) + if not 
new_files: + raise RuntimeError("No output file detected in /output") + + ssh = ssh_connect() + try: + for f in new_files: + if not os.path.isfile(f): + continue + + md5_hex = md5_file(f) + md5_path = write_md5_sidecar(f, md5_hex) + + target_dir = choose_target_dir(job.library, os.path.basename(f)) + remote_file = f"{target_dir}/{os.path.basename(f)}" + remote_md5f = remote_file + ".md5" + + with lock: + job.status = "upload" + job.message = f"Uploading: {os.path.basename(f)} -> {remote_file}" + + sftp_upload(ssh, f, remote_file) + sftp_upload(ssh, md5_path, remote_md5f) + + remote_md5 = remote_md5sum(ssh, remote_file) + if remote_md5.lower() != md5_hex.lower(): + raise RuntimeError(f"MD5 mismatch: local={md5_hex} remote={remote_md5}") + + try: + os.remove(f) + except Exception: + pass + try: + os.remove(md5_path) + except Exception: + pass + + finally: + ssh.close() + + with lock: + job.status = "finished" + job.message = f"OK ({len(new_files)} file(s))" + + except Exception as e: + with lock: + jobs[jobid].status = "failed" + jobs[jobid].message = str(e) + + +def render_nav(active: str) -> str: + def link(label: str, href: str, key: str) -> str: + style = "font-weight:700;" if active == key else "" + return f"{label}" + + return "
" + link("Downloads", "/", "downloads") + link("Proxies", "/proxies", "proxies") + "
" + + +def render_downloads(error: str = "") -> str: + rows = "" + with lock: + job_list = list(jobs.values())[::-1] + + for j in job_list: + rows += ( + f"" + f"{j.id}" + f"{j.url}" + f"{j.engine}" + f"{j.library}" + f"{'none' if not j.proxy else j.proxy}" + f"{j.status}
{j.message}" + f"" + ) + + err_html = f"

{error}

" if error else "" + proxy_note = f"{len(PROXIES)} configured, mode={PROXY_MODE}" if PROXIES else "none configured" + return f""" + + + + Media WebGUI + + +

Media WebGUI

+ {render_nav("downloads")} + {err_html} + +
+

+ +
+ +

+ +
+ +

+ +
+ +

+ +
+ +

+ +
Nur für Engine hoster. Ein Header pro Zeile.
+
+ + +
+ +

+ Output: {OUTPUT_DIR} | MD5: {MD5_DIR} | Proxies: {proxy_note} +

+ + + + + {rows if rows else ""} + +
JobIDURLEngineLibraryProxyStatus
No jobs yet.
+ + """ + + +@app.get("/", response_class=HTMLResponse) +def index(): + return HTMLResponse(render_downloads()) + + +@app.post("/submit") +def submit( + url: str = Form(...), + engine: str = Form("auto"), + library: str = Form("auto"), + proxy: str = Form(""), + headers: str = Form(""), +): + url = url.strip() + if not URL_RE.match(url): + return HTMLResponse(render_downloads("Only http(s) URLs supported"), status_code=400) + + engine = (engine or ENGINE_DEFAULT).strip().lower() + library = (library or "auto").strip().lower() + + try: + header_lines = parse_header_lines(headers) + except ValueError as exc: + return HTMLResponse(render_downloads(str(exc)), status_code=400) + + chosen_proxy = pick_proxy(proxy.strip()) + jobid = str(int(time.time() * 1000)) + + with lock: + jobs[jobid] = Job( + id=jobid, + url=url, + engine=engine, + library=library, + proxy=chosen_proxy, + headers=header_lines, + status="queued", + message="queued", + ) + + t = threading.Thread(target=worker, args=(jobid,), daemon=True) + t.start() + return RedirectResponse(url="/", status_code=303) + + +def render_proxies_page(error: str = "", s5: str = "", s4: str = "", hp: str = "", out_text: str = "") -> str: + err_html = f"

{error}

" if error else "" + return f""" + + + + Proxies + + +

Media WebGUI

+ {render_nav("proxies")} + {err_html} + +
+

+ +
+

+ +
+

+ +
+ +
+ +

Import-Liste (zum Kopieren)

+

Keine Prüfung/Validierung. In .env in PROXY_LIST einfügen (eine Zeile pro Proxy).

+
+ + + """ + + +@app.get("/proxies", response_class=HTMLResponse) +def proxies_get(): + return HTMLResponse(render_proxies_page()) + + +@app.post("/proxies", response_class=HTMLResponse) +def proxies_post(s5: str = Form(""), s4: str = Form(""), hp: str = Form("")): + try: + o1 = format_proxy_lines(s5, "socks5") + o2 = format_proxy_lines(s4, "socks4") + o3 = format_proxy_lines(hp, "http") + combined = "\n".join([x for x in [o1, o2, o3] if x.strip()]) + return HTMLResponse(render_proxies_page(s5=s5, s4=s4, hp=hp, out_text=combined)) + except Exception as e: + return HTMLResponse(render_proxies_page(error=str(e), s5=s5, s4=s4, hp=hp, out_text=""), status_code=400) From 44fbe49c587310d5afdb434bb4d4b666b1bf4261 Mon Sep 17 00:00:00 2001 From: DasPoschi Date: Sun, 4 Jan 2026 13:02:26 +0100 Subject: [PATCH 4/4] Skip non-functional proxies when selecting --- .env | 4 +- README.md | 8 +- docker-compose.yml | 4 +- media-webgui/app.py | 478 ++++++++++++++++++++++++++++++++++++++++++-- 4 files changed, 476 insertions(+), 18 deletions(-) diff --git a/.env b/.env index a7cc20c..06f7ba8 100644 --- a/.env +++ b/.env @@ -1,5 +1,5 @@ TZ=Europe/Berlin -WEBGUI_PORT=8080 +WEBGUI_PORT=8081 # Optional Basic Auth (leave empty to disable) BASIC_AUTH_USER=admin @@ -20,7 +20,7 @@ JELLYFIN_MOVIES_DIR=/jellyfin/Filme JELLYFIN_SERIES_DIR=/jellyfin/Serien # Engines -ENGINE_DEFAULT=auto # auto|ytdlp|direct +ENGINE_DEFAULT=auto # auto|ytdlp|direct|hoster YTDLP_FORMAT=bestvideo+bestaudio/best # Proxy pool (ONLY used for downloads; not for SFTP / webgui internal calls) diff --git a/README.md b/README.md index 4d4e83c..e85bcbf 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ Dieses Projekt lädt Links über: - **yt-dlp** (YouTube & unterstützte Video-Plattformen) - **aria2c** (direkte HTTP/HTTPS-Links, z.B. .mkv/.mp4) +- **hoster** (aria2c mit optionalen HTTP-Headern, z.B. 
Cookies) Danach: - erzeugt es eine **MD5** und speichert sie als Sidecar @@ -17,8 +18,13 @@ Danach: 1) `.env.example` -> `.env` kopieren und Werte setzen. 2) SSH Key ablegen: `data/ssh/id_ed25519` (chmod 600) 3) `docker compose up -d --build` -4) WebUI: `http://:8080` +4) WebUI: `http://:8081` ## Proxies - Proxies werden **nur** an yt-dlp/aria2 übergeben (pro Job), beeinflussen also nicht SFTP/Jellyfin. - `PROXY_LIST` enthält eine Zeile pro Proxy: `socks5://IP:PORT`, `http://IP:PORT`, ... +- Die Proxy-Listen werden 2× täglich aus den TheSpeedX-Quellen geladen und ins richtige Format gebracht. + +## Hoster-Engine +- Engine `hoster` nutzt **aria2c** und akzeptiert zusätzliche HTTP-Header (z.B. `Cookie:` oder `User-Agent:`) im Formular. +- Ein Header pro Zeile. Leere Zeilen/Kommentare werden ignoriert. diff --git a/docker-compose.yml b/docker-compose.yml index 4c50356..f8b93cf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,10 +3,10 @@ version: "3.8" services: media-webgui: build: ./media-webgui - container_name: media-webgui + container_name: jf-dl-media-webgui restart: unless-stopped ports: - - "${WEBGUI_PORT:-8080}:8080" + - "${WEBGUI_PORT:-8081}:8080" env_file: - .env volumes: diff --git a/media-webgui/app.py b/media-webgui/app.py index b324d7c..f00fb6f 100644 --- a/media-webgui/app.py +++ b/media-webgui/app.py @@ -1,4 +1,3 @@ -+26-2 #!/usr/bin/env python3 from __future__ import annotations @@ -8,12 +7,14 @@ import os import random import re import shlex +import socket import subprocess import threading import time from dataclasses import dataclass from typing import Dict, List from urllib.request import urlopen +from urllib.parse import urlparse import paramiko from fastapi import FastAPI, Form, Request @@ -44,19 +45,22 @@ PROXY_SOURCES = { "socks4": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt", "http": "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt", } +PROXY_CHECK_TIMEOUT = 
float(os.environ.get("PROXY_CHECK_TIMEOUT", "3.0")) URL_RE = re.compile(r"^https?://", re.I) YOUTUBE_RE = re.compile(r"(youtube\.com|youtu\.be)", re.I) -VIDEO_EXTS = (".mkv",".mp4",".m4v",".avi",".mov",".wmv",".flv",".webm",".ts",".m2ts",".mpg",".mpeg",".vob",".ogv",".3gp",".3g2") +VIDEO_EXTS = (".mkv", ".mp4", ".m4v", ".avi", ".mov", ".wmv", ".flv", ".webm", ".ts", ".m2ts", ".mpg", ".mpeg", ".vob", ".ogv", ".3gp", ".3g2") SERIES_RE = re.compile(r"(?:^|[^a-z0-9])S(\d{1,2})E(\d{1,2})(?:[^a-z0-9]|$)", re.IGNORECASE) app = FastAPI() app.mount("/static", StaticFiles(directory="static"), name="static") + def _auth_enabled() -> bool: return bool(BASIC_AUTH_USER and BASIC_AUTH_PASS) + def _check_basic_auth(req: Request) -> bool: if not _auth_enabled(): return True @@ -69,17 +73,39 @@ def _check_basic_auth(req: Request) -> bool: except Exception: return False if ":" not in raw: -@@ -82,88 +88,106 @@ class Job: + return False + user, pw = raw.split(":", 1) + return user == BASIC_AUTH_USER and pw == BASIC_AUTH_PASS + + +def _auth_challenge() -> HTMLResponse: + return HTMLResponse("Authentication required", status_code=401, headers={"WWW-Authenticate": 'Basic realm="media-webgui"'}) + + +@app.middleware("http") +async def basic_auth_middleware(request: Request, call_next): + if not _check_basic_auth(request): + return _auth_challenge() + return await call_next(request) + + +@dataclass +class Job: + id: str + url: str engine: str library: str proxy: str + headers: List[str] status: str message: str + jobs: Dict[str, Job] = {} lock = threading.Lock() _rr_idx = 0 + def parse_proxy_list(raw: str) -> List[str]: out = [] for line in (raw or "").splitlines(): @@ -95,17 +121,46 @@ def parse_proxy_list(raw: str) -> List[str]: dedup.append(x) return dedup + def pick_proxy(forced_proxy: str = "") -> str: global _rr_idx if forced_proxy: - return forced_proxy.strip() - if PROXY_MODE == "off" or not PROXIES: + return forced_proxy.strip() if proxy_is_usable(forced_proxy.strip()) else "" + 
# Timeout (seconds) for the TCP reachability probe used by proxy_is_usable().
PROXY_CHECK_TIMEOUT = float(os.environ.get("PROXY_CHECK_TIMEOUT", "3.0"))


def pick_proxy(forced_proxy: str = "") -> str:
    """Select a proxy URL for the next download job.

    A caller-forced proxy is used only if it passes the reachability probe.
    Otherwise the configured pool is consulted according to PROXY_MODE:
    "off" -> no proxy; "random" -> first usable pick from a shuffled copy;
    anything else -> round-robin starting at the last handed-out index.

    Returns an empty string when no usable proxy is available.
    """
    global _rr_idx
    if forced_proxy:
        forced = forced_proxy.strip()
        return forced if proxy_is_usable(forced) else ""
    # Snapshot under the lock so the refresh thread can swap the pool safely.
    with lock:
        proxies = list(PROXIES)
    if PROXY_MODE == "off" or not proxies:
        return ""
    if PROXY_MODE == "random":
        random.shuffle(proxies)
        for candidate in proxies:
            if proxy_is_usable(candidate):
                return candidate
        return ""
    # Round-robin: scan at most one full cycle for a reachable proxy.
    start_idx = _rr_idx % len(proxies)
    for offset in range(len(proxies)):
        idx = (start_idx + offset) % len(proxies)
        candidate = proxies[idx]
        if proxy_is_usable(candidate):
            _rr_idx = idx + 1
            return candidate
    return ""


def proxy_is_usable(proxy: str) -> bool:
    """Best-effort check: can we open a TCP connection to the proxy's host:port?

    Accepts bare "host:port" as well as scheme-prefixed proxy URLs.
    A successful connect says nothing about the proxy protocol actually
    working — it only filters out dead endpoints cheaply.
    """
    proxy = proxy.strip()
    if not proxy:
        return False
    parsed = urlparse(proxy if "://" in proxy else f"http://{proxy}")
    host = parsed.hostname
    port = parsed.port
    if not host or not port:
        return False
    try:
        with socket.create_connection((host, port), timeout=PROXY_CHECK_TIMEOUT):
            return True
    except OSError:
        return False


def format_proxy_lines(raw: str, scheme: str) -> str:
    """Normalise pasted proxy lines into "scheme://host:port", one per line.

    Blank lines, comments and lines without a numeric port are dropped;
    duplicates are removed while preserving first-seen order.

    NOTE(review): the middle of this function was elided as unchanged
    context in the patch this file was recovered from; the per-line
    parsing below is a reconstruction — verify against the original file.
    """
    scheme = scheme.strip().lower()
    out = []
    for line in (raw or "").splitlines():
        s = line.strip()
        if not s or s.startswith("#"):
            continue
        # Strip any existing scheme prefix, then split host:port.
        s = s.split("://")[-1]
        host, _, port = s.partition(":")
        host = host.strip()
        port = port.strip()
        if not host or not port.isdigit():
            continue
        out.append(f"{scheme}://{host}:{port}")
    seen = set()
    ded = []
    for x in out:
        if x not in seen:
            seen.add(x)
            ded.append(x)
    return "\n".join(ded)


def fetch_proxy_source(url: str) -> str:
    """Download one raw proxy list as text; failures propagate to the caller."""
    with urlopen(url, timeout=20) as resp:
        return resp.read().decode("utf-8", "replace")


def load_proxy_sources() -> List[str]:
    """Fetch every configured TheSpeedX source and normalise it into proxy URLs.

    NOTE(review): the fetch/append middle of this function was elided as
    unchanged context in the recovered patch; the loop body below is a
    reconstruction — verify against the original file.
    """
    chunks = []
    for scheme, url in PROXY_SOURCES.items():
        try:
            chunks.append(format_proxy_lines(fetch_proxy_source(url), scheme))
        except Exception as exc:
            # Best-effort: one dead source must not prevent loading the others.
            print(f"Proxy source fetch failed ({scheme}): {exc}")
    combined = "\n".join(chunks)
    return parse_proxy_list(combined)


# Shared proxy pool; read/written only under `lock`.
PROXIES: List[str] = []


def refresh_proxies() -> None:
    """Rebuild the proxy pool from the static .env list plus remote sources."""
    global PROXIES
    combined = "\n".join([PROXY_LIST_RAW, "\n".join(load_proxy_sources())])
    updated = parse_proxy_list(combined)
    with lock:
        PROXIES = updated


def proxy_refresh_loop(interval_seconds: int = 12 * 60 * 60) -> None:
    """Background loop: refresh the proxy pool twice a day, forever."""
    while True:
        try:
            refresh_proxies()
        except Exception as exc:
            print(f"Proxy refresh failed: {exc}")
        time.sleep(interval_seconds)


# Populate the pool once at startup. Guarded: a transient network failure at
# import time must not prevent the web UI from starting (the background loop
# will retry anyway). The original called refresh_proxies() unguarded here.
try:
    refresh_proxies()
except Exception as exc:
    print(f"Initial proxy refresh failed: {exc}")
threading.Thread(target=proxy_refresh_loop, daemon=True).start()


def parse_header_lines(raw: str) -> List[str]:
    """Parse the hoster-engine header textarea into "Name: value" strings.

    One header per line; blank lines and '#' comments are ignored.

    Raises:
        ValueError: on any line without a ':'-separated name and value.
    """
    headers = []
    for line in (raw or "").splitlines():
        s = line.strip()
        if not s or s.startswith("#"):
            continue
        if ":" not in s:
            raise ValueError(f"Invalid header line: {s}")
        name, value = s.split(":", 1)
        name = name.strip()
        value = value.strip()
        if not name or not value:
            raise ValueError(f"Invalid header line: {s}")
        headers.append(f"{name}: {value}")
    return headers


def pick_engine(url: str, forced: str) -> str:
    """Choose the download engine: "ytdlp", "hoster" or "direct".

    An explicitly forced engine wins; otherwise YouTube URLs go to yt-dlp,
    everything else (including direct video-file links) to aria2c.

    NOTE(review): the middle of this function was elided as unchanged
    context in the recovered patch; the branch logic below is a
    reconstruction — verify against the original file.
    """
    forced = (forced or "").strip().lower()
    if forced in ("ytdlp", "direct", "hoster"):
        return forced
    if YOUTUBE_RE.search(url):
        return "ytdlp"
    if url.lower().endswith(VIDEO_EXTS):
        return "direct"
    return "direct"


def run_ytdlp(url: str, out_dir: str, fmt: str, proxy: str):
    """Download *url* with yt-dlp into *out_dir* using format string *fmt*."""
    cmd = ["yt-dlp", "-f", fmt, "-o", f"{out_dir}/%(title)s.%(ext)s", url]
    if proxy:
        cmd += ["--proxy", proxy]
    subprocess.check_call(cmd)


def run_aria2(url: str, out_dir: str, proxy: str, headers: List[str] | None = None):
    """Download *url* with aria2c into *out_dir*.

    *headers* carries extra "Name: value" HTTP headers (hoster engine),
    each passed via a separate --header option.
    """
    cmd = ["aria2c", "--dir", out_dir, "--allow-overwrite=true", "--auto-file-renaming=false", url]
    if proxy:
        cmd += ["--all-proxy", proxy]
    for header in headers or []:
        cmd += ["--header", header]
    subprocess.check_call(cmd)


def md5_file(path: str) -> str:
    """Return the hex MD5 of *path*, streamed in 1 MiB chunks."""
    h = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(chunk)
    return h.hexdigest()


def write_md5_sidecar(local_file: str, md5_hex: str) -> str:
    """Write an md5sum-compatible sidecar for *local_file* under MD5_DIR.

    Returns the path of the sidecar file.
    """
    os.makedirs(MD5_DIR, exist_ok=True)
    base = os.path.basename(local_file)
    md5p = os.path.join(MD5_DIR, base + ".md5")
    with open(md5p, "w", encoding="utf-8") as f:
        f.write(f"{md5_hex} {base}\n")
    return md5p
def ssh_connect() -> paramiko.SSHClient:
    """Open an SSH connection to the Jellyfin host using the configured key.

    Raises:
        RuntimeError: when JELLYFIN_USER is not configured.
    """
    if not JELLYFIN_USER:
        raise RuntimeError("JELLYFIN_USER missing")
    ssh = paramiko.SSHClient()
    # NOTE(review): AutoAddPolicy trusts any host key on first contact —
    # acceptable for a homelab; consider pinning a known_hosts file.
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(
        hostname=JELLYFIN_HOST,
        port=JELLYFIN_PORT,
        username=JELLYFIN_USER,
        key_filename=JELLYFIN_SSH_KEY,
        timeout=30,
    )
    return ssh


def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str):
    """mkdir -p for SFTP: create each missing component of *remote_dir*."""
    parts = [p for p in remote_dir.split("/") if p]
    cur = ""
    for p in parts:
        cur += "/" + p
        try:
            sftp.stat(cur)  # already exists -> nothing to do
        except IOError:
            sftp.mkdir(cur)


def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
    """Upload one file over SFTP, creating the remote directory as needed.

    Raises:
        RuntimeError: wrapping any SFTP failure with both paths for context.
    """
    sftp = ssh.open_sftp()
    try:
        sftp_mkdirs(sftp, os.path.dirname(remote_path))
        sftp.put(local_path, remote_path)
    except Exception as e:
        raise RuntimeError(f"SFTP upload failed: local={local_path} remote={remote_path} error={e}")
    finally:
        sftp.close()


def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
    """Run md5sum on the remote host and return the hex digest.

    Raises:
        RuntimeError: when the remote command fails or prints nothing.
    """
    cmd = f"md5sum {shlex.quote(remote_path)}"
    _, stdout, stderr = ssh.exec_command(cmd, timeout=120)
    out = stdout.read().decode("utf-8", "replace").strip()
    err = stderr.read().decode("utf-8", "replace").strip()
    if err and not out:
        raise RuntimeError(f"Remote md5sum failed: {err}")
    if not out:
        raise RuntimeError("Remote md5sum returned empty output")
    return out.split()[0]


def choose_target_dir(library: str, filename: str) -> str:
    """Map a library selection (or SxxEyy auto-detection) to a Jellyfin dir."""
    library = (library or "auto").lower()
    if library == "series":
        return JELLYFIN_SERIES_DIR
    if library == "movies":
        return JELLYFIN_MOVIES_DIR
    # auto: an SxxEyy token in the filename marks it as a series episode.
    if SERIES_RE.search(filename):
        return JELLYFIN_SERIES_DIR
    return JELLYFIN_MOVIES_DIR


def _all_output_files() -> set:
    """Absolute paths of every file currently under OUTPUT_DIR (recursive)."""
    found = set()
    for root, _, files in os.walk(OUTPUT_DIR):
        for fn in files:
            found.add(os.path.join(root, fn))
    return found


def list_output_files(before: set) -> List[str]:
    """Return new files in OUTPUT_DIR (not in *before*), skipping temp files."""
    new = [p for p in sorted(_all_output_files()) if p not in before]
    final = []
    for p in new:
        low = p.lower()
        if low.endswith((".part", ".tmp", ".crdownload")):
            continue
        final.append(p)
    return final


def worker(jobid: str):
    """Background job: download, hash, upload to Jellyfin, verify, clean up.

    Updates the shared job record's status/message as it progresses; any
    exception marks the job as failed with the error text.
    """
    try:
        with lock:
            job = jobs[jobid]

        os.makedirs(OUTPUT_DIR, exist_ok=True)

        # Snapshot existing files so we can detect what the download produced.
        before = _all_output_files()

        engine = pick_engine(job.url, job.engine)
        proxy = job.proxy
        headers = job.headers

        with lock:
            header_note = f"Headers={len(headers)}" if headers else "Headers=none"
            job.status = "downloading"
            job.message = f"Engine={engine} Proxy={'none' if not proxy else proxy} {header_note}"

        if engine == "ytdlp":
            run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy)
        elif engine == "hoster":
            run_aria2(job.url, OUTPUT_DIR, proxy, headers=headers)
        else:
            run_aria2(job.url, OUTPUT_DIR, proxy)

        new_files = list_output_files(before)
        if not new_files:
            raise RuntimeError("No output file detected in /output")

        ssh = ssh_connect()
        try:
            for f in new_files:
                if not os.path.isfile(f):
                    continue

                md5_hex = md5_file(f)
                md5_path = write_md5_sidecar(f, md5_hex)

                target_dir = choose_target_dir(job.library, os.path.basename(f))
                remote_file = f"{target_dir}/{os.path.basename(f)}"
                remote_md5f = remote_file + ".md5"

                with lock:
                    job.status = "upload"
                    job.message = f"Uploading: {os.path.basename(f)} -> {remote_file}"

                sftp_upload(ssh, f, remote_file)
                sftp_upload(ssh, md5_path, remote_md5f)

                # End-to-end integrity check before deleting the local copy.
                remote_md5 = remote_md5sum(ssh, remote_file)
                if remote_md5.lower() != md5_hex.lower():
                    raise RuntimeError(f"MD5 mismatch: local={md5_hex} remote={remote_md5}")

                # Best-effort local cleanup; a leftover file is harmless.
                try:
                    os.remove(f)
                except Exception:
                    pass
                try:
                    os.remove(md5_path)
                except Exception:
                    pass

        finally:
            ssh.close()

        with lock:
            job.status = "finished"
            job.message = f"OK ({len(new_files)} file(s))"

    except Exception as e:
        with lock:
            jobs[jobid].status = "failed"
            jobs[jobid].message = str(e)
key: str) -> str: + style = "font-weight:700;" if active == key else "" + return f"{label}" + + return "
" + link("Downloads", "/", "downloads") + link("Proxies", "/proxies", "proxies") + "
" + + +def render_downloads(error: str = "") -> str: + rows = "" + with lock: + job_list = list(jobs.values())[::-1] + + for j in job_list: + rows += ( + f"" + f"{j.id}" + f"{j.url}" + f"{j.engine}" + f"{j.library}" + f"{'none' if not j.proxy else j.proxy}" + f"{j.status}
{j.message}" + f"" + ) + + err_html = f"

{error}

" if error else "" + proxy_note = f"{len(PROXIES)} configured, mode={PROXY_MODE}" if PROXIES else "none configured" + return f""" + + + + Media WebGUI + + +

Media WebGUI

+ {render_nav("downloads")} + {err_html} + +
+

+ +
+ +

+ +
+ +

+ +
+ +

+ +
+ +

+ +
Nur für Engine hoster. Ein Header pro Zeile.
+
+ + +
+ +

+ Output: {OUTPUT_DIR} | MD5: {MD5_DIR} | Proxies: {proxy_note} +

+ + + + + {rows if rows else ""} + +
JobIDURLEngineLibraryProxyStatus
No jobs yet.
+ + """ + + +@app.get("/", response_class=HTMLResponse) +def index(): + return HTMLResponse(render_downloads()) + + +@app.post("/submit") +def submit( + url: str = Form(...), + engine: str = Form("auto"), + library: str = Form("auto"), + proxy: str = Form(""), + headers: str = Form(""), +): + url = url.strip() + if not URL_RE.match(url): + return HTMLResponse(render_downloads("Only http(s) URLs supported"), status_code=400) + + engine = (engine or ENGINE_DEFAULT).strip().lower() + library = (library or "auto").strip().lower() + + try: + header_lines = parse_header_lines(headers) + except ValueError as exc: + return HTMLResponse(render_downloads(str(exc)), status_code=400) + + chosen_proxy = pick_proxy(proxy.strip()) + jobid = str(int(time.time() * 1000)) + + with lock: + jobs[jobid] = Job( + id=jobid, + url=url, + engine=engine, + library=library, + proxy=chosen_proxy, + headers=header_lines, + status="queued", + message="queued", + ) + + t = threading.Thread(target=worker, args=(jobid,), daemon=True) + t.start() + return RedirectResponse(url="/", status_code=303) + + +def render_proxies_page(error: str = "", s5: str = "", s4: str = "", hp: str = "", out_text: str = "") -> str: + err_html = f"

{error}

" if error else "" + return f""" + + + + Proxies + + +

Media WebGUI

+ {render_nav("proxies")} + {err_html} + +
+

+ +
+

+ +
+

+ +
+ +
+ +

Import-Liste (zum Kopieren)

+

Keine Prüfung/Validierung. In .env in PROXY_LIST einfügen (eine Zeile pro Proxy).

+
+ + + """ + + +@app.get("/proxies", response_class=HTMLResponse) +def proxies_get(): + return HTMLResponse(render_proxies_page()) + + +@app.post("/proxies", response_class=HTMLResponse) +def proxies_post(s5: str = Form(""), s4: str = Form(""), hp: str = Form("")): + try: + o1 = format_proxy_lines(s5, "socks5") + o2 = format_proxy_lines(s4, "socks4") + o3 = format_proxy_lines(hp, "http") + combined = "\n".join([x for x in [o1, o2, o3] if x.strip()]) + return HTMLResponse(render_proxies_page(s5=s5, s4=s4, hp=hp, out_text=combined)) + except Exception as e: + return HTMLResponse(render_proxies_page(error=str(e), s5=s5, s4=s4, hp=hp, out_text=""), status_code=400)