fix: remove invalid fastapi.middleware.base import (ModuleNotFoundError)

This commit is contained in:
2026-04-15 09:29:17 +00:00
parent dca4ac9964
commit 6c022c1412

View File

@@ -19,7 +19,6 @@ from typing import Any, Dict, List, Optional, Tuple
import paramiko
from fastapi import FastAPI, Form, Request
from fastapi.middleware.base import BaseHTTPMiddleware
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from myjdapi import Myjdapi
@@ -55,9 +54,9 @@ MD5_DIR = os.environ.get("MD5_DIR", "/md5").rstrip("/")
BASIC_AUTH_USER = os.environ.get("BASIC_AUTH_USER", "")
BASIC_AUTH_PASS = os.environ.get("BASIC_AUTH_PASS", "")
POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
MIN_VIDEO_SIZE_MB = int(os.environ.get("MIN_VIDEO_SIZE_MB", "200"))
MIN_VIDEO_BYTES = MIN_VIDEO_SIZE_MB * 1024 * 1024
POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
MIN_VIDEO_SIZE_MB = int(os.environ.get("MIN_VIDEO_SIZE_MB", "200"))
MIN_VIDEO_BYTES = MIN_VIDEO_SIZE_MB * 1024 * 1024
# JDownloader writes here inside container
JD_OUTPUT_PATH = "/output"
@@ -205,9 +204,9 @@ class Job:
def ensure_env():
missing = []
for k, v in [
("MYJD_EMAIL", MYJD_EMAIL),
("MYJD_PASSWORD", MYJD_PASSWORD),
("JELLYFIN_USER", JELLYFIN_USER),
("MYJD_EMAIL", MYJD_EMAIL),
("MYJD_PASSWORD", MYJD_PASSWORD),
("JELLYFIN_USER", JELLYFIN_USER),
("JELLYFIN_SSH_KEY", JELLYFIN_SSH_KEY),
]:
if not v:
@@ -243,14 +242,12 @@ def get_device():
wanted = (MYJD_DEVICE or "").strip()
# wait up to 30s for device to become ONLINE
deadline = time.time() + 30
last = None
while time.time() < deadline:
devs = jd.list_devices() or []
last = devs
# pick by name (or first)
def pick():
if wanted:
for d in devs:
@@ -358,7 +355,6 @@ def is_video_file(path: str) -> bool:
# Name fragments that identify JDownloader's bundled demo/fallback clips.
DEMO_PATTERNS = {"big_buck_bunny", "bigbuckbunny", "big buck bunny", "bbb_sunflower"}

def is_demo_link(name: str) -> bool:
    """Detect JDownloader demo/fallback videos (e.g. Big Buck Bunny)."""
    # Normalize: lowercase, dashes become underscores, dots become spaces,
    # so the known patterns match regardless of separator style.
    normalized = name.lower().replace("-", "_").replace(".", " ")
    for pattern in DEMO_PATTERNS:
        if pattern in normalized:
            return True
    return False
@@ -401,22 +397,21 @@ def pick_library_target(library_choice: str, filename: str, package_name: str) -
return JELLYFIN_MOVIES_DIR or JELLYFIN_DEST_DIR
if library_choice == "series":
return JELLYFIN_SERIES_DIR or JELLYFIN_DEST_DIR
# auto
if SERIES_RE.search(filename) or SERIES_RE.search(package_name or ""):
return JELLYFIN_SERIES_DIR or JELLYFIN_DEST_DIR
return JELLYFIN_MOVIES_DIR or JELLYFIN_DEST_DIR
def build_remote_paths(job_library: str, package_name: str, local_file: str) -> Tuple[str, str]:
filename = os.path.basename(local_file)
filename = os.path.basename(local_file)
base_target = pick_library_target(job_library, filename, package_name)
m = SERIES_RE.search(filename) or SERIES_RE.search(package_name or "")
m = SERIES_RE.search(filename) or SERIES_RE.search(package_name or "")
is_series = (job_library == "series") or (job_library == "auto" and m)
if is_series:
show_query = package_name or os.path.splitext(filename)[0]
tv = tmdb_search_tv(show_query) if TMDB_API_KEY else None
show_name = sanitize_name(tv["name"]) if tv and tv.get("name") else sanitize_name(show_query)
tv = tmdb_search_tv(show_query) if TMDB_API_KEY else None
show_name = sanitize_name(tv["name"]) if tv and tv.get("name") else sanitize_name(show_query)
season = int(m.group(1)) if m else 1
episode = int(m.group(2)) if m else 1
@@ -426,15 +421,15 @@ def build_remote_paths(job_library: str, package_name: str, local_file: str) ->
else:
remote_dir = base_target
ext = os.path.splitext(filename)[1]
ext = os.path.splitext(filename)[1]
remote_filename = f"{show_name} - S{season:02d}E{episode:02d}{ext}"
return remote_dir, remote_filename
movie_query = package_name or os.path.splitext(filename)[0]
mv = tmdb_search_movie(movie_query) if TMDB_API_KEY else None
title = mv.get("title") if mv else None
date = mv.get("release_date") if mv else None
year = date[:4] if isinstance(date, str) and len(date) >= 4 else None
mv = tmdb_search_movie(movie_query) if TMDB_API_KEY else None
title = mv.get("title") if mv else None
date = mv.get("release_date") if mv else None
year = date[:4] if isinstance(date, str) and len(date) >= 4 else None
title_safe = sanitize_name(title) if title else sanitize_name(movie_query)
year_safe = year if year else ""
@@ -445,7 +440,7 @@ def build_remote_paths(job_library: str, package_name: str, local_file: str) ->
else:
remote_dir = base_target
ext = os.path.splitext(filename)[1]
ext = os.path.splitext(filename)[1]
remote_filename = f"{title_safe} ({year_safe}){ext}".strip() if year_safe else f"{title_safe}{ext}"
return remote_dir, remote_filename
@@ -490,28 +485,18 @@ def call_raw_jd_api(dev, endpoints: List[str], payloads: List[Dict[str, Any]]) -
def cancel_job(dev, jobid: str) -> str:
links, pkg_map = query_links_and_packages(dev, jobid)
link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
pkg_ids = [p for p in pkg_map]
msgs = []
pkg_ids = list(pkg_map)
for ep, pl in [
("downloads/remove_links", {"linkIds": link_ids, "packageIds": []}),
("downloads/remove_links", {"linkIds": link_ids, "packageIds": []}),
("downloadcontroller/remove_links", {"linkIds": link_ids, "packageIds": []}),
]:
try:
call_raw_jd_api(dev, [ep], [pl])
except Exception:
pass
for ep, pl in [
("downloads/remove_links", {"linkIds": [], "packageIds": pkg_ids}),
("downloads/remove_links", {"linkIds": [], "packageIds": pkg_ids}),
("downloadcontroller/remove_links", {"linkIds": [], "packageIds": pkg_ids}),
]:
try:
call_raw_jd_api(dev, [ep], [pl])
except Exception:
pass
return " ".join(msgs) if msgs else "Download abgebrochen."
return "Download abgebrochen."
def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> str:
link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
@@ -536,12 +521,12 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
# ============================================================
def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dict[Any, Dict[str, Any]]]:
links = dev.downloads.query_links([{
"jobUUIDs": [int(jobid)] if jobid.isdigit() else [jobid],
"maxResults": -1,
"startAt": 0,
"name": True,
"finished": True,
"running": True,
"jobUUIDs": [int(jobid)] if jobid.isdigit() else [jobid],
"maxResults": -1,
"startAt": 0,
"name": True,
"finished": True,
"running": True,
"bytesLoaded": True,
"bytesTotal": True,
"bytes": True,
@@ -554,7 +539,7 @@ def query_links_and_packages(dev, jobid: str) -> Tuple[List[Dict[str, Any]], Dic
pkg_ids = sorted({l.get("packageUUID") for l in links if l.get("packageUUID") is not None})
pkgs = dev.downloads.query_packages([{
"packageUUIDs": pkg_ids,
"maxResults": -1,
"maxResults": -1,
"startAt": 0,
"saveTo": True,
"uuid": True,
@@ -618,12 +603,12 @@ def _filter_linkgrabber(dev, jobid: str) -> Tuple[int, int]:
links = []
try:
links = dev.linkgrabber.query_links([{
"jobUUIDs": [int(jobid)] if str(jobid).isdigit() else [jobid],
"maxResults": -1,
"startAt": 0,
"name": True,
"size": True,
"uuid": True,
"jobUUIDs": [int(jobid)] if str(jobid).isdigit() else [jobid],
"maxResults": -1,
"startAt": 0,
"name": True,
"size": True,
"uuid": True,
"packageUUID": True,
}]) or []
except Exception:
@@ -682,8 +667,8 @@ def worker(jobid: str):
job = jobs.get(jobid)
if job:
if accepted == 0:
job.status = "failed"
job.message = (
job.status = "failed"
job.message = (
f"Keine Video-Dateien \u2265 {MIN_VIDEO_SIZE_MB} MB gefunden "
f"({rejected} Link(s) verworfen)."
)
@@ -699,8 +684,8 @@ def worker(jobid: str):
if job.cancel_requested:
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "canceled"
job.message = cancel_msg or "Download abgebrochen und Dateien entfernt."
job.status = "canceled"
job.message = cancel_msg or "Download abgebrochen und Dateien entfernt."
job.progress = 0.0
return
@@ -708,8 +693,8 @@ def worker(jobid: str):
if not links:
with lock:
job.status = "collecting"
job.message = "Warte auf Link-Crawler\u2026"
job.status = "collecting"
job.message = "Warte auf Link-Crawler\u2026"
job.progress = 0.0
time.sleep(POLL_SECONDS)
continue
@@ -718,9 +703,9 @@ def worker(jobid: str):
if all_demo and not is_demo_link(job.url):
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gew\u00fcnschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gew\u00fcnschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.progress = 0.0
return
@@ -728,9 +713,9 @@ def worker(jobid: str):
if not all_finished:
progress = calculate_progress(links)
with lock:
job.status = "downloading"
done = sum(1 for l in links if l.get("finished"))
job.message = f"Download l\u00e4uft\u2026 ({done}/{len(links)} fertig)"
job.status = "downloading"
done = sum(1 for l in links if l.get("finished"))
job.message = f"Download l\u00e4uft\u2026 ({done}/{len(links)} fertig)"
job.progress = progress
time.sleep(POLL_SECONDS)
continue
@@ -740,16 +725,16 @@ def worker(jobid: str):
if not video_files:
with lock:
job.status = "failed"
job.message = "Keine Video-Datei gefunden (Whitelist)."
job.status = "failed"
job.message = "Keine Video-Datei gefunden (Whitelist)."
job.progress = 0.0
return
valid_videos = [p for p in video_files if ffprobe_ok(p)]
if not valid_videos:
with lock:
job.status = "failed"
job.message = "ffprobe: keine g\u00fcltige Video-Datei."
job.status = "failed"
job.message = "ffprobe: keine g\u00fcltige Video-Datei."
job.progress = 0.0
return
@@ -769,8 +754,8 @@ def worker(jobid: str):
valid_videos = renamed
with lock:
job.status = "upload"
job.message = f"Download fertig. MD5/Upload/Verify f\u00fcr {len(valid_videos)} Datei(en)\u2026"
job.status = "upload"
job.message = f"Download fertig. MD5/Upload/Verify f\u00fcr {len(valid_videos)} Datei(en)\u2026"
job.progress = 100.0
ssh = ssh_connect()
@@ -790,7 +775,6 @@ def worker(jobid: str):
if remote_md5.lower() != md5_hex.lower():
raise RuntimeError(f"MD5 mismatch for {os.path.basename(f)}: local={md5_hex} remote={remote_md5}")
# Cleanup local
try:
os.remove(f)
except Exception:
@@ -809,8 +793,8 @@ def worker(jobid: str):
jellyfin_refresh_library()
with lock:
job.status = "finished"
job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.")
job.status = "finished"
job.message = "Upload + MD5 OK. " + (jd_cleanup_msg or "JDownloader: Paket/Links entfernt.")
job.progress = 100.0
return
@@ -818,8 +802,8 @@ def worker(jobid: str):
with lock:
job = jobs.get(jobid)
if job:
job.status = "failed"
job.message = str(e)
job.status = "failed"
job.message = str(e)
job.progress = 0.0
# ============================================================
@@ -848,13 +832,13 @@ def render_jobs_page(job_list: list, log_lines: list) -> str:
rows = ""
for j in reversed(job_list):
bar_color = {
"finished": "#4caf50",
"failed": "#f44336",
"canceled": "#9e9e9e",
"uploading": "#2196f3",
"upload": "#2196f3",
"downloading":"#ff9800",
"collecting": "#9c27b0",
"finished": "#4caf50",
"failed": "#f44336",
"canceled": "#9e9e9e",
"uploading": "#2196f3",
"upload": "#2196f3",
"downloading": "#ff9800",
"collecting": "#9c27b0",
}.get(j.status, "#607d8b")
rows += (
f"<tr>"
@@ -913,7 +897,6 @@ def render_jobs_page(job_list: list, log_lines: list) -> str:
</html>"""
def render_page(message: str = "", error: str = "") -> str:
video_list = ", ".join(sorted(VIDEO_EXTS))
return f"""<!DOCTYPE html>
<html lang='de'>
<head>
@@ -958,11 +941,11 @@ def render_page(message: str = "", error: str = "") -> str:
</html>"""
def render_proxies_page(
socks5_in: str = "",
socks4_in: str = "",
out_text: str = "",
socks5_in: str = "",
socks4_in: str = "",
out_text: str = "",
export_path: str = "",
error: str = "",
error: str = "",
) -> str:
return f"""<!DOCTYPE html>
<html lang='de'>
@@ -1026,13 +1009,13 @@ def build_jdproxies_payload(text: str) -> Dict[str, Any]:
"https": "HTTPS",
}
for line in text.splitlines():
line = line.strip()
line = line.strip()
if not line:
continue
parsed = urllib.parse.urlparse(line)
proto = (parsed.scheme or "").lower()
host = parsed.hostname or ""
port = parsed.port or 1080
parsed = urllib.parse.urlparse(line)
proto = (parsed.scheme or "").lower()
host = parsed.hostname or ""
port = parsed.port or 1080
if not host:
continue
jd_type = type_map.get(proto, "SOCKS5")
@@ -1131,7 +1114,6 @@ def proxies_get():
socks4_in = fetch_proxy_list(
"https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
)
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")
combined = "\n".join([x for x in [s5, s4] if x.strip()])
@@ -1150,9 +1132,8 @@ def proxies_post(
socks4_in: str = Form(""),
):
try:
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")
combined = "\n".join([x for x in [s5, s4] if x.strip()])
return HTMLResponse(render_proxies_page(
socks5_in=socks5_in,
@@ -1175,16 +1156,13 @@ def proxies_save(
socks4_in: str = Form(""),
):
try:
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")
s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4")
combined = "\n".join([x for x in [s5, s4] if x.strip()])
payload = build_jdproxies_payload(combined)
os.makedirs(os.path.dirname(PROXY_EXPORT_PATH), exist_ok=True)
with open(PROXY_EXPORT_PATH, "w", encoding="utf-8") as fh:
json.dump(payload, fh, indent=2, ensure_ascii=False)
return HTMLResponse(render_proxies_page(
socks5_in=socks5_in,
socks4_in=socks4_in,