5 Commits

Author SHA1 Message Date
f540c894af Update .env 2026-01-04 13:18:05 +01:00
475d61e1bc Merge pull request #5 from DasPoschi/codex/add-integration-for-chatgpt.com-a56dpf
Retry downloads with next proxy on failure
2026-01-04 13:17:07 +01:00
35c360dd46 Merge branch 'main' into codex/add-integration-for-chatgpt.com-a56dpf 2026-01-04 13:17:00 +01:00
70f8364522 Retry downloads with next proxy on failure 2026-01-04 13:16:11 +01:00
9b3c1fbbdd Merge pull request #4 from DasPoschi/codex/add-integration-for-chatgpt.com-xrizez
Add live per-job progress bars with polling
2026-01-04 13:10:37 +01:00
2 changed files with 36 additions and 18 deletions

2
.env
View File

@@ -3,7 +3,7 @@ WEBGUI_PORT=8081
# Optional Basic Auth (leave empty to disable)
BASIC_AUTH_USER=admin
-BASIC_AUTH_PASS=change_me
+BASIC_AUTH_PASS=123456
# Paths inside container
OUTPUT_DIR=/output

View File

@@ -96,6 +96,7 @@ class Job:
engine: str
library: str
proxy: str
proxy_forced: bool
headers: List[str]
progress: float
status: str
@@ -127,8 +128,7 @@ def pick_proxy(forced_proxy: str = "") -> str:
global _rr_idx
if forced_proxy:
return forced_proxy.strip() if proxy_is_usable(forced_proxy.strip()) else ""
with lock:
proxies = list(PROXIES)
proxies = snapshot_proxies()
if PROXY_MODE == "off" or not proxies:
return ""
if PROXY_MODE == "random":
@@ -221,6 +221,11 @@ def refresh_proxies() -> None:
PROXIES = updated
def snapshot_proxies() -> List[str]:
with lock:
return list(PROXIES)
def proxy_refresh_loop(interval_seconds: int = 12 * 60 * 60) -> None:
while True:
try:
@@ -307,7 +312,6 @@ def run_aria2(url: str, out_dir: str, proxy: str, progress_cb, headers: List[str
raise subprocess.CalledProcessError(ret, cmd)
def md5_file(path: str) -> str:
h = hashlib.md5()
with open(path, "rb") as f:
@@ -415,6 +419,8 @@ def worker(jobid: str):
engine = pick_engine(job.url, job.engine)
proxy = job.proxy
headers = job.headers
proxy_forced = job.proxy_forced
proxy_candidates = []
with lock:
header_note = f"Headers={len(headers)}" if headers else "Headers=none"
@@ -428,17 +434,33 @@ def worker(jobid: str):
if engine == "ytdlp":
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy, update_progress)
elif engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, proxy, update_progress, headers=headers)
else:
run_aria2(job.url, OUTPUT_DIR, proxy, update_progress)
if proxy:
if proxy_forced:
proxy_candidates = [proxy]
else:
proxy_list = snapshot_proxies()
proxy_candidates = [proxy] + [p for p in proxy_list if p != proxy]
else:
proxy_candidates = [""]
if engine == "ytdlp":
run_ytdlp(job.url, OUTPUT_DIR, YTDLP_FORMAT, proxy)
elif engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, proxy, headers=headers)
for idx, candidate in enumerate(proxy_candidates):
try:
if engine == "hoster":
run_aria2(job.url, OUTPUT_DIR, candidate, update_progress, headers=headers)
else:
run_aria2(job.url, OUTPUT_DIR, proxy)
run_aria2(job.url, OUTPUT_DIR, candidate, update_progress)
break
except subprocess.CalledProcessError as exc:
if idx == len(proxy_candidates) - 1:
raise
with lock:
job.message = f"Proxy failed, trying next ({idx + 1}/{len(proxy_candidates) - 1})"
except Exception as exc:
if idx == len(proxy_candidates) - 1:
raise
with lock:
job.message = f"Proxy failed, trying next ({idx + 1}/{len(proxy_candidates) - 1})"
new_files = list_output_files(before)
if not new_files:
@@ -596,11 +618,6 @@ def render_downloads(error: str = "") -> str:
}}
setInterval(refreshProgress, 2000);
</script>
<thead><tr><th>JobID</th><th>URL</th><th>Engine</th><th>Library</th><th>Proxy</th><th>Status</th></tr></thead>
<tbody>
{rows if rows else "<tr><td colspan='6'><em>No jobs yet.</em></td></tr>"}
</tbody>
</table>
</body></html>
"""
@@ -653,6 +670,7 @@ def submit(
engine=engine,
library=library,
proxy=chosen_proxy,
proxy_forced=bool(proxy.strip()),
headers=header_lines,
progress=0.0,
status="queued",