feat: pre-download filter for videos >= MIN_VIDEO_SIZE_MB (default 200 MB)
This commit is contained in:
120
jd-webgui/app.py
120
jd-webgui/app.py
@@ -57,6 +57,8 @@ BASIC_AUTH_USER = os.environ.get("BASIC_AUTH_USER", "")
|
||||
# HTTP basic-auth credentials for the web UI (empty = auth disabled upstream).
BASIC_AUTH_PASS = os.environ.get("BASIC_AUTH_PASS", "")

# Seconds between status polls of the JDownloader device in the worker loop.
POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
# Minimum size (in MB) a video must have to survive the pre-download
# linkgrabber filter; overridable via the MIN_VIDEO_SIZE_MB env var.
MIN_VIDEO_SIZE_MB = int(os.environ.get("MIN_VIDEO_SIZE_MB", "200"))
# Same threshold expressed in bytes, for comparison against link sizes.
MIN_VIDEO_BYTES = MIN_VIDEO_SIZE_MB * 1024 * 1024

# JDownloader writes here inside container
JD_OUTPUT_PATH = "/output"
@@ -830,6 +832,70 @@ def calculate_progress(links: List[Dict[str, Any]]) -> float:
|
||||
return 0.0
|
||||
return max(0.0, min(100.0, (loaded / total) * 100.0))
|
||||
|
||||
# ============================================================
|
||||
# Linkgrabber filter (pre-download)
|
||||
# ============================================================
|
||||
def _filter_linkgrabber(dev, jobid: str) -> Tuple[int, int]:
    """Filter the linkgrabber before downloading starts.

    Waits (up to 120 s) for the link crawler to settle, then partitions the
    job's links: video files at or above the minimum size are kept and moved
    to the download list, everything else is removed. All remote calls are
    best-effort; failures degrade to "no links seen" rather than raising.

    Returns:
        Tuple of (accepted, rejected) link counts.
    """
    # Poll until no crawler reports itself as still crawling, or give up
    # after the deadline. Any API error ends the wait early.
    wait_until = time.time() + 120
    while time.time() < wait_until:
        try:
            active = dev.linkgrabber.query_link_crawlers([{"collectorInfo": True}]) or []
            if not any(entry.get("crawling") for entry in active):
                break
        except Exception:
            break
        time.sleep(2)

    # Fetch the job's collected links; on any error fall back to an empty list.
    query = {
        "jobUUIDs": [int(jobid)] if str(jobid).isdigit() else [jobid],
        "maxResults": -1,
        "startAt": 0,
        "name": True,
        "size": True,
        "uuid": True,
        "packageUUID": True,
    }
    try:
        candidates = dev.linkgrabber.query_links([query]) or []
    except Exception:
        candidates = []

    accepted_ids = []
    rejected_ids = []
    accepted_pkgs = set()
    for entry in candidates:
        _stem, suffix = os.path.splitext(entry.get("name", "").lower())
        nbytes = entry.get("size", -1)
        # Unknown size (reported as negative) passes the size gate; the
        # extension gate still has to match a known video extension.
        if suffix in VIDEO_EXTS and (nbytes < 0 or nbytes >= MIN_VIDEO_BYTES):
            accepted_ids.append(entry.get("uuid"))
            pkg = entry.get("packageUUID")
            if pkg is not None:
                accepted_pkgs.add(pkg)
        else:
            rejected_ids.append(entry.get("uuid"))

    # Drop the rejects from the linkgrabber (best-effort).
    if rejected_ids:
        try:
            dev.linkgrabber.remove_links(link_ids=rejected_ids, package_ids=[])
        except Exception:
            pass

    # Promote the survivors to the download list (best-effort).
    if accepted_ids:
        try:
            dev.linkgrabber.move_to_downloadlist(
                link_ids=accepted_ids,
                package_ids=list(accepted_pkgs),
            )
        except Exception:
            pass

    return len(accepted_ids), len(rejected_ids)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Worker
|
||||
# ============================================================
|
||||
@@ -838,6 +904,26 @@ def worker(jobid: str):
|
||||
ensure_env()
|
||||
dev = get_device()
|
||||
|
||||
# Filter linkgrabber: keep only video files >= MIN_VIDEO_SIZE_MB
|
||||
with lock:
|
||||
job = jobs.get(jobid)
|
||||
if job:
|
||||
job.status = "collecting"
|
||||
job.message = f"Filtere Links (nur Videos \u2265 {MIN_VIDEO_SIZE_MB} MB)\u2026"
|
||||
accepted, rejected = _filter_linkgrabber(dev, jobid)
|
||||
with lock:
|
||||
job = jobs.get(jobid)
|
||||
if job:
|
||||
if accepted == 0:
|
||||
job.status = "failed"
|
||||
job.message = (
|
||||
f"Keine Video-Dateien \u2265 {MIN_VIDEO_SIZE_MB} MB gefunden "
|
||||
f"({rejected} Link(s) verworfen)."
|
||||
)
|
||||
job.progress = 0.0
|
||||
return
|
||||
job.message = f"{accepted} Video(s) akzeptiert, {rejected} verworfen."
|
||||
|
||||
while True:
|
||||
with lock:
|
||||
job = jobs.get(jobid)
|
||||
@@ -856,7 +942,7 @@ def worker(jobid: str):
|
||||
if not links:
|
||||
with lock:
|
||||
job.status = "collecting"
|
||||
job.message = "Warte auf Link-Crawler…"
|
||||
job.message = "Warte auf Link-Crawler\u2026"
|
||||
job.progress = 0.0
|
||||
time.sleep(POLL_SECONDS)
|
||||
continue
|
||||
@@ -866,7 +952,7 @@ def worker(jobid: str):
|
||||
cancel_msg = cancel_job(dev, jobid)
|
||||
with lock:
|
||||
job.status = "failed"
|
||||
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gewünschten Links."
|
||||
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gew\u00fcnschten Links."
|
||||
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
|
||||
job.progress = 0.0
|
||||
return
|
||||
@@ -877,7 +963,7 @@ def worker(jobid: str):
|
||||
with lock:
|
||||
job.status = "downloading"
|
||||
done = sum(1 for l in links if l.get("finished"))
|
||||
job.message = f"Download läuft… ({done}/{len(links)} fertig)"
|
||||
job.message = f"Download l\u00e4uft\u2026 ({done}/{len(links)} fertig)"
|
||||
job.progress = progress
|
||||
time.sleep(POLL_SECONDS)
|
||||
continue
|
||||
@@ -896,13 +982,13 @@ def worker(jobid: str):
|
||||
if not valid_videos:
|
||||
with lock:
|
||||
job.status = "failed"
|
||||
job.message = "ffprobe: keine gültige Video-Datei."
|
||||
job.message = "ffprobe: keine g\u00fcltige Video-Datei."
|
||||
job.progress = 0.0
|
||||
return
|
||||
|
||||
with lock:
|
||||
job.status = "upload"
|
||||
job.message = f"Download fertig. MD5/Upload/Verify für {len(valid_videos)} Datei(en)…"
|
||||
job.message = f"Download fertig. MD5/Upload/Verify f\u00fcr {len(valid_videos)} Datei(en)\u2026"
|
||||
job.progress = 100.0
|
||||
|
||||
ssh = ssh_connect()
|
||||
@@ -1018,7 +1104,7 @@ def render_page(error: str = "") -> str:
|
||||
<head>
|
||||
<link rel="stylesheet" href="/static/style.css">
|
||||
<meta charset="utf-8">
|
||||
<title>JD → Jellyfin</title>
|
||||
<title>JD \u2192 Jellyfin</title>
|
||||
<script>
|
||||
async function refreshJobs() {{
|
||||
if (document.hidden) return;
|
||||
@@ -1035,7 +1121,7 @@ def render_page(error: str = "") -> str:
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>JD → Jellyfin</h1>
|
||||
<h1>JD \u2192 Jellyfin</h1>
|
||||
{render_nav("downloads")}
|
||||
{err_html}
|
||||
|
||||
@@ -1099,7 +1185,7 @@ def render_logs_page() -> str:
|
||||
<head>
|
||||
<link rel="stylesheet" href="/static/style.css">
|
||||
<meta charset="utf-8">
|
||||
<title>JD → Jellyfin (Logs)</title>
|
||||
<title>JD \u2192 Jellyfin (Logs)</title>
|
||||
<script>
|
||||
async function refreshLogs() {{
|
||||
if (document.hidden) return;
|
||||
@@ -1120,9 +1206,9 @@ def render_logs_page() -> str:
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<h1>JD → Jellyfin</h1>
|
||||
<h1>JD \u2192 Jellyfin</h1>
|
||||
{render_nav("logs")}
|
||||
<p class="hint">Verbindungs-Debugger (Echtzeit). Letzte {LOG_BUFFER_LIMIT} Einträge.</p>
|
||||
<p class="hint">Verbindungs-Debugger (Echtzeit). Letzte {LOG_BUFFER_LIMIT} Eintr\u00e4ge.</p>
|
||||
<textarea id="log-body" class="log-area" rows="20" readonly></textarea>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1143,10 +1229,10 @@ def render_proxies_page(
|
||||
<head>
|
||||
<link rel="stylesheet" href="/static/style.css">
|
||||
<meta charset="utf-8">
|
||||
<title>JD → Jellyfin (Proxies)</title>
|
||||
<title>JD \u2192 Jellyfin (Proxies)</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>JD → Jellyfin</h1>
|
||||
<h1>JD \u2192 Jellyfin</h1>
|
||||
{render_nav("proxies")}
|
||||
{err_html}
|
||||
{msg_html}
|
||||
@@ -1166,7 +1252,7 @@ def render_proxies_page(
|
||||
</form>
|
||||
|
||||
<h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
|
||||
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
|
||||
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Pr\u00fcfung/Validierung.</p>
|
||||
|
||||
<div class="row">
|
||||
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{esc(out_text)}</textarea>
|
||||
@@ -1174,8 +1260,8 @@ def render_proxies_page(
|
||||
|
||||
<button type="button" onclick="navigator.clipboard.writeText(document.getElementById('out').value)">Kopieren</button>
|
||||
|
||||
<h2 style="margin-top:18px;">Datei für Connection Manager</h2>
|
||||
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader → Verbindungsmanager → Importieren.</p>
|
||||
<h2 style="margin-top:18px;">Datei f\u00fcr Connection Manager</h2>
|
||||
<p class="hint">Speichert die Liste als <code>.jdproxies</code> im Container, z. B. zum Import in JDownloader \u2192 Verbindungsmanager \u2192 Importieren.</p>
|
||||
|
||||
<form method="post" action="/proxies/save">
|
||||
<textarea name="socks5_in" style="display:none;">{esc(socks5_in)}</textarea>
|
||||
@@ -1217,7 +1303,7 @@ def submit(url: str = Form(...), package_name: str = Form(""), library: str = Fo
|
||||
return HTMLResponse(render_page(f"JDownloader nicht erreichbar: {e}"), status_code=503)
|
||||
resp = dev.linkgrabber.add_links([{
|
||||
"links": url,
|
||||
"autostart": True,
|
||||
"autostart": False,
|
||||
"assignJobID": True,
|
||||
"packageName": package_name,
|
||||
}])
|
||||
@@ -1251,7 +1337,7 @@ def cancel(jobid: str):
|
||||
if job.status in {"finished", "failed", "canceled"}:
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
job.cancel_requested = True
|
||||
job.message = "Abbruch angefordert…"
|
||||
job.message = "Abbruch angefordert\u2026"
|
||||
return RedirectResponse(url="/", status_code=303)
|
||||
|
||||
@app.post("/clear-finished")
|
||||
|
||||
Reference in New Issue
Block a user