Compare commits

..

17 Commits

Author SHA1 Message Date
f87f0f5cdc Merge branch 'main' into codex/fix-jdownloader-api-package-removal-error-54zoo0 2026-01-21 21:23:26 +01:00
68353b33aa Detect demo link downloads and fail early 2026-01-21 21:22:59 +01:00
c3b1fcadfa Merge pull request #17 from DasPoschi/codex/fix-jdownloader-api-package-removal-error
Add raw MyJDownloader API fallback for removing/canceling links
2026-01-21 21:09:25 +01:00
25ad8c05d0 Add raw API cleanup fallback for JDownloader 2026-01-21 21:08:48 +01:00
b65cb53463 Merge pull request #16 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api-4xe4oq
Remove proxy blacklist and HTTP proxy handling; use ProxyScrape SOCKS lists
2026-01-04 14:46:16 +01:00
6c13fbbb2f Merge branch 'main' into codex/fetch-proxies-from-proxyscrape-api-4xe4oq 2026-01-04 14:46:06 +01:00
33282ddbcb Remove proxy blacklist filters 2026-01-04 14:45:44 +01:00
7795e22744 Merge pull request #15 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api-4vaqb3
Remove HTTP proxies from proxy UI
2026-01-04 14:27:14 +01:00
e83f1323cd Merge branch 'main' into codex/fetch-proxies-from-proxyscrape-api-4vaqb3 2026-01-04 14:27:07 +01:00
194b16e09c Remove HTTP proxies from UI 2026-01-04 14:26:36 +01:00
423e8e28ec Merge pull request #14 from DasPoschi/codex/fetch-proxies-from-proxyscrape-api
Use ProxyScrape API for SOCKS lists and normalize single-line responses
2026-01-04 14:20:54 +01:00
daeee039fa Update proxy sources for socks lists 2026-01-04 14:20:38 +01:00
97a5afbee9 Update app.py 2026-01-03 23:09:10 +01:00
a2de578087 Merge pull request #13 from DasPoschi/codex/configure-proxies-for-downloads-only-r44dl5
Add *.your-server.de to proxy blacklist
2026-01-03 23:04:38 +01:00
c3aac479fe Merge branch 'main' into codex/configure-proxies-for-downloads-only-r44dl5 2026-01-03 22:56:21 +01:00
1350b50199 Add your-server.de to proxy blacklist 2026-01-03 22:55:54 +01:00
6b06134edf Merge pull request #12 from DasPoschi/codex/configure-proxies-for-downloads-only
Bypass proxies for internal HTTP calls
2026-01-03 22:43:36 +01:00
2 changed files with 135 additions and 46 deletions

View File

@@ -18,7 +18,7 @@ from typing import Any, Dict, List, Optional, Tuple
from myjdapi import Myjdapi from myjdapi import Myjdapi
import paramiko import paramiko
from fastapi import FastAPI, Form, Request from fastapi import FastAPI, Form, Request
from fastapi.responses import HTMLResponse, RedirectResponse from fastapi.responses import HTMLResponse, PlainTextResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
# ============================================================ # ============================================================
@@ -57,6 +57,7 @@ POLL_SECONDS = float(os.environ.get("POLL_SECONDS", "5"))
# JDownloader writes here inside container # JDownloader writes here inside container
JD_OUTPUT_PATH = "/output" JD_OUTPUT_PATH = "/output"
PROXY_EXPORT_PATH = os.environ.get("PROXY_EXPORT_PATH", "/output/jd-proxies.jdproxies") PROXY_EXPORT_PATH = os.environ.get("PROXY_EXPORT_PATH", "/output/jd-proxies.jdproxies")
LOG_BUFFER_LIMIT = int(os.environ.get("LOG_BUFFER_LIMIT", "500"))
URL_RE = re.compile(r"^https?://", re.I) URL_RE = re.compile(r"^https?://", re.I)
@@ -125,6 +126,21 @@ class Job:
jobs: Dict[str, Job] = {} jobs: Dict[str, Job] = {}
lock = threading.Lock() lock = threading.Lock()
log_lock = threading.Lock()
connection_logs: List[str] = []
def log_connection(message: str) -> None:
    """Append a timestamped entry to the shared connection-log buffer.

    Thread-safe: the buffer is guarded by ``log_lock`` and trimmed so it
    never holds more than ``LOG_BUFFER_LIMIT`` entries (oldest dropped).
    """
    stamp = time.strftime("%Y-%m-%d %H:%M:%S")
    entry = f"[{stamp}] {message}"
    with log_lock:
        connection_logs.append(entry)
        overflow = len(connection_logs) - LOG_BUFFER_LIMIT
        if overflow > 0:
            # Drop the oldest entries to stay within the cap.
            del connection_logs[:overflow]
def get_connection_logs() -> str:
    """Return the buffered connection log as one newline-joined string."""
    # Copy under the lock, join outside it to keep the critical section short.
    with log_lock:
        snapshot = list(connection_logs)
    return "\n".join(snapshot)
# ============================================================ # ============================================================
# Core helpers # Core helpers
@@ -151,6 +167,7 @@ def ensure_env():
def get_device(): def get_device():
jd = Myjdapi() jd = Myjdapi()
log_connection(f"MyJDownloader connect as {MYJD_EMAIL or 'unknown'}")
jd.connect(MYJD_EMAIL, MYJD_PASSWORD) jd.connect(MYJD_EMAIL, MYJD_PASSWORD)
wanted = (MYJD_DEVICE or "").strip() wanted = (MYJD_DEVICE or "").strip()
@@ -248,6 +265,7 @@ def ffprobe_ok(path: str) -> bool:
def ssh_connect() -> paramiko.SSHClient: def ssh_connect() -> paramiko.SSHClient:
ssh = paramiko.SSHClient() ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
log_connection(f"SSH connect {JELLYFIN_USER}@{JELLYFIN_HOST}:{JELLYFIN_PORT}")
ssh.connect( ssh.connect(
hostname=JELLYFIN_HOST, hostname=JELLYFIN_HOST,
port=JELLYFIN_PORT, port=JELLYFIN_PORT,
@@ -270,6 +288,7 @@ def sftp_mkdirs(sftp: paramiko.SFTPClient, remote_dir: str):
def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str): def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
sftp = ssh.open_sftp() sftp = ssh.open_sftp()
try: try:
log_connection(f"SFTP upload {local_path} -> {remote_path}")
sftp_mkdirs(sftp, os.path.dirname(remote_path)) sftp_mkdirs(sftp, os.path.dirname(remote_path))
sftp.put(local_path, remote_path) sftp.put(local_path, remote_path)
finally: finally:
@@ -278,6 +297,7 @@ def sftp_upload(ssh: paramiko.SSHClient, local_path: str, remote_path: str):
def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str: def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
quoted = shlex.quote(remote_path) quoted = shlex.quote(remote_path)
cmd = f"md5sum {quoted}" cmd = f"md5sum {quoted}"
log_connection(f"SSH exec {cmd}")
stdin, stdout, stderr = ssh.exec_command(cmd, timeout=120) stdin, stdout, stderr = ssh.exec_command(cmd, timeout=120)
out = stdout.read().decode("utf-8", "replace").strip() out = stdout.read().decode("utf-8", "replace").strip()
err = stderr.read().decode("utf-8", "replace").strip() err = stderr.read().decode("utf-8", "replace").strip()
@@ -292,6 +312,7 @@ def remote_md5sum(ssh: paramiko.SSHClient, remote_path: str) -> str:
# ============================================================ # ============================================================
def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any: def _http_get_json(url: str, headers: Optional[Dict[str, str]] = None) -> Any:
req = urllib.request.Request(url, headers=headers or {}) req = urllib.request.Request(url, headers=headers or {})
log_connection(f"HTTP GET {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r: with NO_PROXY_OPENER.open(req, timeout=20) as r:
return json.loads(r.read().decode("utf-8", "replace")) return json.loads(r.read().decode("utf-8", "replace"))
@@ -365,32 +386,16 @@ def format_proxy_lines(raw: str, scheme: str) -> str:
def fetch_proxy_list(url: str) -> str: def fetch_proxy_list(url: str) -> str:
req = urllib.request.Request(url) req = urllib.request.Request(url)
log_connection(f"HTTP GET {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as resp: with NO_PROXY_OPENER.open(req, timeout=20) as resp:
return resp.read().decode("utf-8", "replace") text = resp.read().decode("utf-8", "replace")
if "\n" not in text and re.search(r"\s", text):
return re.sub(r"\s+", "\n", text.strip())
return text
def build_jdproxies_payload(text: str) -> Dict[str, Any]: def build_jdproxies_payload(text: str) -> Dict[str, Any]:
if not text.strip(): if not text.strip():
raise ValueError("Keine Proxy-Einträge zum Speichern.") raise ValueError("Keine Proxy-Einträge zum Speichern.")
blacklist_filter = {
"entries": [
"# Dies ist ein Kommentar",
"// Dies ist auch ein Kommentar",
"# Für jdownloader.org auskommentieren",
"# jdownloader.org",
"# unten für alle Accounts mit der ID 'test *' @ jdownloader.org auskommentieren",
"#test@jdownloader.org",
"# Kommentar unten für ein Konto mit der ID 'test' @ jdownloader.org",
"#test$@jdownloader.org",
"# Sie können Muster für Konto-ID und Host verwenden, z. B. accountPattern @ hostPattern",
"",
"my.jdownloader.org",
"",
"api.jdownloader.org",
"",
"*.jdownloader.org",
],
"type": "BLACKLIST",
}
entries: List[Dict[str, Any]] = [] entries: List[Dict[str, Any]] = []
type_map = { type_map = {
"socks5": "SOCKS5", "socks5": "SOCKS5",
@@ -425,7 +430,7 @@ def build_jdproxies_payload(text: str) -> Dict[str, Any]:
if not proxy_type: if not proxy_type:
continue continue
entries.append({ entries.append({
"filter": blacklist_filter, "filter": None,
"proxy": { "proxy": {
"address": parsed.hostname, "address": parsed.hostname,
"password": None, "password": None,
@@ -529,6 +534,7 @@ def jellyfin_refresh_library():
try: try:
url = JELLYFIN_API_BASE + path url = JELLYFIN_API_BASE + path
req = urllib.request.Request(url, headers=headers, method="POST") req = urllib.request.Request(url, headers=headers, method="POST")
log_connection(f"HTTP POST {url} (no-proxy)")
with NO_PROXY_OPENER.open(req, timeout=20) as r: with NO_PROXY_OPENER.open(req, timeout=20) as r:
_ = r.read() _ = r.read()
return return
@@ -586,6 +592,31 @@ def local_paths_from_links(links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[
out.append(p) out.append(p)
return out return out
def call_raw_jd_api(dev, endpoints: List[str], payloads: List[Dict[str, Any]]) -> bool:
    """Invoke a raw MyJDownloader endpoint through whatever generic call
    method the device wrapper exposes.

    Different ``myjdapi`` versions expose different generic entry points
    (``action``/``call``/``api``/``request``) with different signatures, so
    for every endpoint/payload pair we try three calling conventions in
    order: positional, ``params=``, ``data=``.

    Args:
        dev: Connected MyJDownloader device wrapper.
        endpoints: Candidate API endpoint paths to try.
        payloads: Candidate request payloads to try.

    Returns:
        True as soon as one call succeeds, False if every combination fails.

    Never raises: ``TypeError`` means "wrong calling convention, try the
    next style"; any other exception means the call itself failed, so we
    move on to the next payload.  (The previous version let a non-TypeError
    raised by the ``params=`` attempt escape the function, because it was
    raised inside an ``except TypeError:`` handler where the sibling
    ``except Exception:`` clause of the same try no longer applies.)
    """
    # Each style receives (method, endpoint, payload).
    call_styles = (
        lambda m, e, p: m(e, p),
        lambda m, e, p: m(e, params=p),
        lambda m, e, p: m(e, data=p),
    )
    for method_name in ("action", "call", "api", "request"):
        method = getattr(dev, method_name, None)
        if method is None:
            continue
        for endpoint in endpoints:
            for payload in payloads:
                for style in call_styles:
                    try:
                        style(method, endpoint, payload)
                        return True
                    except TypeError:
                        # Signature mismatch: try the next calling style.
                        continue
                    except Exception:
                        # The call itself failed: give up on this payload.
                        break
    return False
def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]: def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None] link_ids = [l.get("uuid") for l in links if l.get("uuid") is not None]
pkg_ids = list(pkg_map.keys()) pkg_ids = list(pkg_map.keys())
@@ -620,6 +651,14 @@ def try_remove_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
except Exception: except Exception:
continue continue
endpoint_candidates = [
"downloads/removeLinks",
"downloadsV2/removeLinks",
"downloadcontroller/removeLinks",
]
if call_raw_jd_api(dev, endpoint_candidates, payloads):
return None
return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)." return "JDownloader-API: Paket/Links konnten nicht entfernt werden (Wrapper-Methoden nicht vorhanden)."
def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]: def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict[str, Any]]) -> Optional[str]:
@@ -658,6 +697,14 @@ def try_cancel_from_jd(dev, links: List[Dict[str, Any]], pkg_map: Dict[Any, Dict
except Exception: except Exception:
continue continue
endpoint_candidates = [
"downloads/removeLinks",
"downloadsV2/removeLinks",
"downloadcontroller/removeLinks",
]
if call_raw_jd_api(dev, endpoint_candidates, payloads):
return None
return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)." return "JDownloader-API: Abbrechen fehlgeschlagen (Wrapper-Methoden nicht vorhanden)."
def cancel_job(dev, jobid: str) -> Optional[str]: def cancel_job(dev, jobid: str) -> Optional[str]:
@@ -733,6 +780,16 @@ def worker(jobid: str):
time.sleep(POLL_SECONDS) time.sleep(POLL_SECONDS)
continue continue
all_demo = all(is_demo_link(l.get("name", "")) for l in links)
if all_demo and not is_demo_link(job.url):
cancel_msg = cancel_job(dev, jobid)
with lock:
job.status = "failed"
base_msg = "JDownloader lieferte das Demo-Video Big Buck Bunny statt des gewünschten Links."
job.message = f"{base_msg} {cancel_msg}" if cancel_msg else base_msg
job.progress = 0.0
return
all_finished = all(bool(l.get("finished")) for l in links) all_finished = all(bool(l.get("finished")) for l in links)
if not all_finished: if not all_finished:
progress = calculate_progress(links) progress = calculate_progress(links)
@@ -827,6 +884,14 @@ def favicon():
def jobs_get(): def jobs_get():
return HTMLResponse(render_job_rows()) return HTMLResponse(render_job_rows())
@app.get("/logs", response_class=HTMLResponse)
def logs_get():
    # Serve the log-viewer page; the page's own JavaScript polls /logs/data.
    return HTMLResponse(render_logs_page())
@app.get("/logs/data", response_class=PlainTextResponse)
def logs_data():
    # Plain-text snapshot of the in-memory connection log buffer,
    # fetched periodically by the /logs page's refresh script.
    return PlainTextResponse(get_connection_logs())
def render_job_rows() -> str: def render_job_rows() -> str:
rows = "" rows = ""
with lock: with lock:
@@ -938,15 +1003,50 @@ def render_nav(active: str) -> str:
"<div style='margin: 8px 0 14px 0;'>" "<div style='margin: 8px 0 14px 0;'>"
+ link("Downloads", "/", "downloads") + link("Downloads", "/", "downloads")
+ link("Proxies", "/proxies", "proxies") + link("Proxies", "/proxies", "proxies")
+ link("Logs", "/logs", "logs")
+ "</div>" + "</div>"
) )
def render_logs_page() -> str:
    """Render the /logs page: a read-only textarea that the embedded
    JavaScript refreshes from /logs/data every 2 seconds, skipping
    refreshes while the tab is hidden (``document.hidden``).

    Returns the full HTML document as a string; double braces escape
    literal ``{``/``}`` for the f-string inside the inline script.
    """
    return f"""
<html>
<head>
<link rel="stylesheet" href="/static/style.css">
<meta charset="utf-8">
<title>JD → Jellyfin (Logs)</title>
<script>
async function refreshLogs() {{
if (document.hidden) return;
try {{
const resp = await fetch('/logs/data');
if (!resp.ok) return;
const text = await resp.text();
const area = document.getElementById('log-body');
if (area) {{
area.value = text;
area.scrollTop = area.scrollHeight;
}}
}} catch (e) {{
}}
}}
setInterval(refreshLogs, 2000);
window.addEventListener('load', refreshLogs);
</script>
</head>
<body>
<h1>JD → Jellyfin</h1>
{render_nav("logs")}
<p class="hint">Verbindungs-Debugger (Echtzeit). Letzte {LOG_BUFFER_LIMIT} Einträge.</p>
<textarea id="log-body" class="log-area" rows="20" readonly></textarea>
</body>
</html>
"""
def render_proxies_page( def render_proxies_page(
error: str = "", error: str = "",
message: str = "", message: str = "",
socks5_in: str = "", socks5_in: str = "",
socks4_in: str = "", socks4_in: str = "",
http_in: str = "",
out_text: str = "", out_text: str = "",
export_path: str = "", export_path: str = "",
) -> str: ) -> str:
@@ -976,16 +1076,11 @@ def render_proxies_page(
<textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea> <textarea name="socks4_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{socks4_in}</textarea>
</div> </div>
<div class="row">
<label>HTTP (ein Proxy pro Zeile, z. B. IP:PORT)</label><br/>
<textarea name="http_in" rows="6" style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{http_in}</textarea>
</div>
<button type="submit">In JDownloader-Format umwandeln</button> <button type="submit">In JDownloader-Format umwandeln</button>
</form> </form>
<h2 style="margin-top:18px;">JDownloader Import-Liste</h2> <h2 style="margin-top:18px;">JDownloader Import-Liste</h2>
<p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>, <code>http://IP:PORT</code>. Keine Prüfung/Validierung.</p> <p class="hint">Format: <code>socks5://IP:PORT</code>, <code>socks4://IP:PORT</code>. Keine Prüfung/Validierung.</p>
<div class="row"> <div class="row">
<textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea> <textarea id="out" rows="12" readonly style="width:100%; max-width:860px; padding:10px; border:1px solid #ccc; border-radius:8px;">{out_text}</textarea>
@@ -999,7 +1094,6 @@ def render_proxies_page(
<form method="post" action="/proxies/save"> <form method="post" action="/proxies/save">
<textarea name="socks5_in" style="display:none;">{socks5_in}</textarea> <textarea name="socks5_in" style="display:none;">{socks5_in}</textarea>
<textarea name="socks4_in" style="display:none;">{socks4_in}</textarea> <textarea name="socks4_in" style="display:none;">{socks4_in}</textarea>
<textarea name="http_in" style="display:none;">{http_in}</textarea>
<button type="submit">Liste als JDProxies speichern</button> <button type="submit">Liste als JDProxies speichern</button>
</form> </form>
@@ -1069,18 +1163,20 @@ def cancel(jobid: str):
@app.get("/proxies", response_class=HTMLResponse) @app.get("/proxies", response_class=HTMLResponse)
def proxies_get(): def proxies_get():
try: try:
socks5_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt") socks5_in = fetch_proxy_list(
socks4_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt") "https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks5&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
)
socks4_in = fetch_proxy_list(
"https://api.proxyscrape.com/v4/free-proxy-list/get?request=displayproxies&protocol=socks4&timeout=10000&country=all&ssl=yes&anonymity=elite&skip=0&limit=2000"
)
http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt") http_in = fetch_proxy_list("https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/http.txt")
s5 = format_proxy_lines(socks5_in, "socks5") s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4") s4 = format_proxy_lines(socks4_in, "socks4")
hp = format_proxy_lines(http_in, "http") combined = "\n".join([x for x in [s5, s4] if x.strip()])
combined = "\n".join([x for x in [s5, s4, hp] if x.strip()])
return HTMLResponse(render_proxies_page( return HTMLResponse(render_proxies_page(
socks5_in=socks5_in, socks5_in=socks5_in,
socks4_in=socks4_in, socks4_in=socks4_in,
http_in=http_in,
out_text=combined, out_text=combined,
export_path=PROXY_EXPORT_PATH, export_path=PROXY_EXPORT_PATH,
)) ))
@@ -1091,18 +1187,15 @@ def proxies_get():
def proxies_post( def proxies_post(
socks5_in: str = Form(""), socks5_in: str = Form(""),
socks4_in: str = Form(""), socks4_in: str = Form(""),
http_in: str = Form(""),
): ):
try: try:
s5 = format_proxy_lines(socks5_in, "socks5") s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4") s4 = format_proxy_lines(socks4_in, "socks4")
hp = format_proxy_lines(http_in, "http")
combined = "\n".join([x for x in [s5, s4, hp] if x.strip()]) combined = "\n".join([x for x in [s5, s4] if x.strip()])
return HTMLResponse(render_proxies_page( return HTMLResponse(render_proxies_page(
socks5_in=socks5_in, socks5_in=socks5_in,
socks4_in=socks4_in, socks4_in=socks4_in,
http_in=http_in,
out_text=combined, out_text=combined,
export_path=PROXY_EXPORT_PATH, export_path=PROXY_EXPORT_PATH,
)) ))
@@ -1111,7 +1204,6 @@ def proxies_post(
error=str(e), error=str(e),
socks5_in=socks5_in, socks5_in=socks5_in,
socks4_in=socks4_in, socks4_in=socks4_in,
http_in=http_in,
out_text="", out_text="",
export_path=PROXY_EXPORT_PATH, export_path=PROXY_EXPORT_PATH,
), status_code=400) ), status_code=400)
@@ -1120,19 +1212,16 @@ def proxies_post(
def proxies_save( def proxies_save(
socks5_in: str = Form(""), socks5_in: str = Form(""),
socks4_in: str = Form(""), socks4_in: str = Form(""),
http_in: str = Form(""),
): ):
try: try:
s5 = format_proxy_lines(socks5_in, "socks5") s5 = format_proxy_lines(socks5_in, "socks5")
s4 = format_proxy_lines(socks4_in, "socks4") s4 = format_proxy_lines(socks4_in, "socks4")
hp = format_proxy_lines(http_in, "http") combined = "\n".join([x for x in [s5, s4] if x.strip()])
combined = "\n".join([x for x in [s5, s4, hp] if x.strip()])
export_path = save_proxy_export(combined) export_path = save_proxy_export(combined)
return HTMLResponse(render_proxies_page( return HTMLResponse(render_proxies_page(
message=f"Proxy-Liste gespeichert: {export_path}", message=f"Proxy-Liste gespeichert: {export_path}",
socks5_in=socks5_in, socks5_in=socks5_in,
socks4_in=socks4_in, socks4_in=socks4_in,
http_in=http_in,
out_text=combined, out_text=combined,
export_path=export_path, export_path=export_path,
)) ))
@@ -1141,7 +1230,6 @@ def proxies_save(
error=str(e), error=str(e),
socks5_in=socks5_in, socks5_in=socks5_in,
socks4_in=socks4_in, socks4_in=socks4_in,
http_in=http_in,
out_text="", out_text="",
export_path=PROXY_EXPORT_PATH, export_path=PROXY_EXPORT_PATH,
), status_code=400) ), status_code=400)

View File

@@ -19,3 +19,4 @@ code { font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace; fo
.progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; } .progress-row { display:flex; align-items:center; gap:8px; margin-top:6px; }
.progress-text { font-size:12px; color:#333; min-width:48px; } .progress-text { font-size:12px; color:#333; min-width:48px; }
.inline-form { margin-top:6px; } .inline-form { margin-top:6px; }
.log-area { width:100%; max-width: 920px; padding:10px; border:1px solid #ccc; border-radius:8px; background:#fff; }