Kapitel 13/Tutorial.md updated

2025-09-17 08:47:04 +00:00
parent dba41c4a33
commit 3bbdfd5a4e


@@ -568,16 +568,12 @@ In diesem Schritt erstellen wir den eigentlichen Workflow in **n8n**. Er sorgt d
TMP="${CLIPPER_TMP:-/srv/clipper/temp}"
OUT_BASE="${CLIPPER_OUT:-/srv/clipper/out}/${ID}"
LOGDIR="/srv/clipper/logs/${ID}"
LOGDIR="${CLIPPER_LOG:-/srv/clipper/logs}/${ID}"
FILE="${TMP}/${ID}.mp4"
TEMP="${TMP}/${ID}.temp.mp4"
PART="${TMP}/${ID}.mp4.part"
LOCK="${TMP}/${ID}.lock"
DROP_BASE="${SFTP_DROP_BASE:-incoming}"
REMOTE_DIR="${DROP_BASE}/${ID}"
REMOTE_FILE="${REMOTE_DIR}/${ID}.mp4"
mkdir -p "$TMP" "$LOGDIR" "$OUT_BASE"
LOG="${LOGDIR}/download.log"
log(){ printf '[%(%F %T)T] %s\n' -1 "$*" ; }
@@ -590,29 +586,9 @@ In diesem Schritt erstellen wir den eigentlichen Workflow in **n8n**. Er sorgt d
fi
trap 'flock -u 9; rm -f "$LOCK"' EXIT
SFTP_OPTS=(-i "${SFTP_KEY}" -P "${SFTP_PORT:-22}" -oBatchMode=yes -oStrictHostKeyChecking=accept-new)
SFTP_TARGET="${SFTP_USER}@${SFTP_HOST}"
sftp_batch() {
local cmds
cmds=$(printf "%s\n" "$@")
sftp "${SFTP_OPTS[@]}" "${SFTP_TARGET}" <<< "$cmds"
}
exists_remote_file() {
local out
out=$(sftp "${SFTP_OPTS[@]}" "${SFTP_TARGET}" <<< "ls -l ${REMOTE_FILE}" 2>&1 || true)
[[ "$out" != *"No such file"* ]] && [[ "$out" != *"not found"* ]]
}
log "=== Start VOD ${ID} ==="
log "=== START: VOD ${ID} ==="
log "URL: ${URL}"
log "DROP: ${REMOTE_FILE}"
if exists_remote_file; then
log "SKIP: ${REMOTE_FILE} existiert bereits"
exit 0
fi
log "TMP: ${TMP}"
if [[ -s "$TEMP" && ! -s "$FILE" ]]; then
log "RESUME: $TEMP -> $FILE"
@@ -630,27 +606,13 @@ In diesem Schritt erstellen wir den eigentlichen Workflow in **n8n**. Er sorgt d
exit 10
fi
sftp_batch "mkdir ${DROP_BASE}" "mkdir ${REMOTE_DIR}"
FINAL_DIR="${OUT_BASE}/original"
mkdir -p "$FINAL_DIR"
mv -f "$FILE" "$FINAL_DIR/${ID}.mp4"
log "MOVE: ${FILE} -> ${FINAL_DIR}/${ID}.mp4"
tries=0
until exists_remote_file; do
tries=$((tries+1))
log "UPLOAD Try #$tries: $FILE -> ${REMOTE_FILE}"
sftp_batch "reput ${FILE} ${REMOTE_FILE}" "put ${FILE} ${REMOTE_FILE}" || true
sleep $((2*tries))
[[ $tries -ge 5 ]] && break
done
log "=== DONE: VOD ${ID} erfolgreich geladen ==="
if ! exists_remote_file; then
log "ERROR: Upload fehlgeschlagen"
exit 20
fi
rm -f "$PART" "$TEMP" || true
log "CLEANUP: $TMP"
rm -rf "${TMP:?}/"*
log "=== Done VOD ${ID} ==="
```
With this script we download the current VODs, upload them to Nextcloud for further processing, and clean up afterwards. In addition, a log is written per VOD to `<clipper-ordner>/logs/<ID>/download.log`.
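For a quick manual check you can trigger the download once by hand and follow its log. A minimal sketch, assuming the script is saved as `/srv/clipper/bin/vod-download` (the path is not shown in this excerpt; adjust it to your setup) and that it takes the VOD ID and URL as its first two arguments, matching the `${ID}` and `${URL}` variables above:
```bash
# Run as user "clipper" on the ClipperLXC.
# Script name, argument order and the example ID are assumptions;
# the log path comes from the LOGDIR/LOG variables in the script above
# (default CLIPPER_LOG=/srv/clipper/logs).
/srv/clipper/bin/vod-download 123456789 "<VOD-URL>"

# Follow the per-VOD download log while it runs
tail -f /srv/clipper/logs/123456789/download.log
```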
@@ -837,64 +799,71 @@ Das folgende Schaubild zeigt dir die konkrete Verkabelung
**Location:** Terminal in the **ClipperLXC** → as user **clipper**
Open the file and paste the following content:
```bash
nano /srv/clipper/bin/vod-analyze # (this script will analyze the VOD and generate candidates.json)
```
Content:
```bash
#!/usr/bin/env bash
set -euo pipefail
. /etc/clipper/clipper.env
. /etc/clipper/clipper.env
ID="${1:?need VOD id}"
VOD_IN_MP4="/srv/clipper/temp/${ID}.mp4" # temporäre Datei (vom VOD aus NC heruntergeladen)
VOD_IN_MP4="${CLIPPER_OUT}/${ID}/original/${ID}.mp4"
OUT_BASE="${CLIPPER_OUT}/${ID}"
ANALYSIS="${OUT_BASE}/analysis"
LOGDIR="${CLIPPER_LOG}/${ID}"
mkdir -p "$ANALYSIS" "$LOGDIR"
exec > >(tee -a "${LOGDIR}/analyze.log") 2>&1
echo "== Analyze $ID =="
# 1) Scene changes
echo "[FFMPEG] Scene-change analysis running..."
ffmpeg -hide_banner -loglevel error -i "${VOD_IN_MP4}" \
-vf "scale=-2:360,select=gt(scene\,0.30),showinfo" -an -f null - \
2> "${LOGDIR}/sceneinfo.log"
# 2) Audio statistics
echo "[FFMPEG] Audio statistics running..."
ffmpeg -hide_banner -loglevel error -i "${VOD_IN_MP4}" \
-vn -ac 1 -ar 16000 \
-af "astats=metadata=1:reset=2,ametadata=print:key=lavfi.astats.Overall.RMS_level" \
-f null - \
2> "${LOGDIR}/astats.log" || true
# 3) Logs → candidates.json
ANALYSIS="$ANALYSIS" LOGDIR="$LOGDIR" python3 - <<'PY'
import os,re,json
out=os.environ["ANALYSIS"]; log=os.environ["LOGDIR"]
import os, re, json, sys
from datetime import datetime
def log(msg):
timestamp = datetime.now().strftime("%F %T")
print(f"[PY] [{timestamp}] {msg}")
out = os.environ["ANALYSIS"]
logdir = os.environ["LOGDIR"]
scene_ts = []
with open(os.path.join(log,"sceneinfo.log"), errors="ignore") as f:
log("Lese sceneinfo.log...")
try:
with open(os.path.join(logdir, "sceneinfo.log"), errors="ignore") as f:
for line in f:
m = re.search(r"pts_time:([0-9]+(?:\.[0-9]+)?)", line)
if m: scene_ts.append(float(m.group(1)))
if m:
scene_ts.append(float(m.group(1)))
except Exception as e:
log(f"Fehler beim Lesen von sceneinfo.log: {e}")
sys.exit(1)
log(f"{len(scene_ts)} Szenenwechsel gefunden.")
has_audio = False
ap=os.path.join(log,"astats.log")
ap = os.path.join(logdir, "astats.log")
if os.path.exists(ap):
log("Prüfe astats.log auf Audiodaten...")
with open(ap, errors="ignore") as f:
has_audio = "RMS_level" in f.read()
log(f"Audioanalyse: {'gefunden' if has_audio else 'nicht vorhanden'}")
cands = [{
"start": max(0.0, t - 2.0),
@@ -903,32 +872,17 @@ Das folgende Schaubild zeigt dir die konkrete Verkabelung
"tags": ["scene-cut"] + (["audio-peak"] if has_audio else [])
} for t in scene_ts]
with open(os.path.join(out,"candidates.json"),"w",encoding="utf-8") as f:
target = os.path.join(out, "candidates.json")
try:
with open(target, "w", encoding="utf-8") as f:
json.dump(cands, f, ensure_ascii=False, indent=2)
print("Wrote", os.path.join(out,"candidates.json"))
log(f"{len(cands)} Kandidaten gespeichert → {target}")
except Exception as e:
log(f"Fehler beim Schreiben von candidates.json: {e}")
sys.exit(2)
PY
echo "== Done $ID =="
# cleanup temp VOD
echo "== Cleanup: remove temp file $VOD_IN_MP4 =="
rm -f "$VOD_IN_MP4"
```
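Before wiring this into n8n it is worth running the analysis once by hand and looking at the output. A small sketch, assuming `jq` is installed, the default paths from `clipper.env` are used, and `123456789` stands in for a real VOD ID:
```bash
# Run the analysis for a single VOD (ID is the first argument, as above)
/srv/clipper/bin/vod-analyze 123456789

# Count the detected candidates and look at the first entry
jq 'length' /srv/clipper/out/123456789/analysis/candidates.json
jq '.[0]'   /srv/clipper/out/123456789/analysis/candidates.json

# The analysis log ends up next to the download log
less /srv/clipper/logs/123456789/analyze.log
```
An empty array usually just means that no scene change exceeded the `0.30` threshold in the `select=gt(scene\,0.30)` filter.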
### Step 4.2 n8n: Start the analysis
**Location:** n8n web interface
**SSH Node Analyze VOD**
- **Credentials:** `SSH Clipper`
- **Working Dir:** `/srv/clipper`
- **Command (Expression):**
```js
{{`/srv/clipper/bin/vod-analyze ${$json.data.id}`}}
```
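For an item whose incoming JSON carries, for example, `data.id = "123456789"` (a made-up ID), the expression above resolves to this command on the ClipperLXC:
```bash
/srv/clipper/bin/vod-analyze 123456789
```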
### Result