From 88fc7017e6c605154c47ebf3aab67a9bd5602f62 Mon Sep 17 00:00:00 2001
From: Fedja Windows
Date: Sat, 26 Apr 2025 07:09:44 +0200
Subject: [PATCH] Add REST API; images must be passed via POST as binary files
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 api_server.py        | 150 ++++++++++++++++++++++++++++++++
 n8n_workflow.json    | 207 +++++++++++++++++++++++++++++++++++++++++++
 start_api_server.bat |  12 +++
 3 files changed, 369 insertions(+)
 create mode 100644 api_server.py
 create mode 100644 n8n_workflow.json
 create mode 100644 start_api_server.bat

diff --git a/api_server.py b/api_server.py
new file mode 100644
index 0000000..b0b0e7e
--- /dev/null
+++ b/api_server.py
@@ -0,0 +1,150 @@
+# api_server.py - REST API for the video generator
+# Save this file in the webui directory
+
+from fastapi import FastAPI, File, UploadFile, BackgroundTasks, Form
+from fastapi.responses import FileResponse
+from fastapi.middleware.cors import CORSMiddleware
+import uvicorn
+import os
+import subprocess
+import uuid
+import shutil
+from typing import Optional
+
+app = FastAPI(title="Hunyuan Video Generator API")
+
+# Allow CORS so n8n can reach the API
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],  # Restrict to the n8n server IP in production
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Directories for temporary uploads and generated outputs
+TEMP_DIR = os.path.join(os.path.dirname(__file__), "temp_uploads")
+OUTPUT_DIR = os.path.join(os.path.dirname(__file__), "outputs")
+PYTHON_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "system", "python", "python.exe")
+
+os.makedirs(TEMP_DIR, exist_ok=True)
+os.makedirs(OUTPUT_DIR, exist_ok=True)
+
+def generate_video_task(image_path: str, prompt: str, n_prompt: str,
+                        seed: int, length: float, steps: int,
+                        use_teacache: bool, output_filename: str,
+                        job_id: str):
+    """Runs the video generation as a background task"""
+
+    # Build the command line
+    cmd = [
+        PYTHON_PATH,
+        "hunyuan_cli.py",
+        "--image", image_path,
+        "--prompt", prompt,
+        "--seed", str(seed),
+        "--length", str(length),
+        "--steps", str(steps),
+        "--output", f"{job_id}_{output_filename}"
+    ]
+
+    if n_prompt:
+        cmd.extend(["--n_prompt", n_prompt])
+
+    if use_teacache:
+        cmd.append("--teacache")
+
+    # Run the command
+    try:
+        subprocess.run(cmd, check=True, cwd=os.path.dirname(__file__))
+        print(f"Video generation completed for job {job_id}")
+    except subprocess.CalledProcessError as e:
+        print(f"Error generating video: {e}")
+
+@app.post("/generate/")
+async def generate_video(
+    background_tasks: BackgroundTasks,
+    image: UploadFile = File(...),
+    prompt: str = Form(...),
+    n_prompt: Optional[str] = Form(""),
+    seed: Optional[int] = Form(31337),
+    length: Optional[float] = Form(5.0),
+    steps: Optional[int] = Form(25),
+    use_teacache: Optional[bool] = Form(True),
+    output_filename: Optional[str] = Form("output.mp4")
+):
+    """
+    Generates a video based on the given parameters
+    """
+    # Generate a unique job ID
+    job_id = str(uuid.uuid4())
+
+    # Build a temporary file path for the uploaded image
+    temp_image_path = os.path.join(TEMP_DIR, f"{job_id}_{image.filename}")
+
+    # Save the image
+    with open(temp_image_path, "wb") as buffer:
+        shutil.copyfileobj(image.file, buffer)
+
+    # Start the video generation as a background task
+    background_tasks.add_task(
+        generate_video_task,
+        temp_image_path,
+        prompt,
+        n_prompt,
+        seed,
+        length,
+        steps,
+        use_teacache,
+        output_filename,
+        job_id
+    )
+
+    return {
+        "status": "processing",
+        "job_id": job_id,
+        "message": "Video generation started in background",
+        "result_url": f"/result/{job_id}_{output_filename}"
+    }
+
+@app.get("/result/{filename}")
+async def get_result(filename: str):
+    """
+    Returns the generated video once it is available
+    """
+    file_path = os.path.join(OUTPUT_DIR, filename)
+
+    if os.path.exists(file_path):
+        return FileResponse(
+            file_path,
+            media_type="video/mp4",
+            filename=filename.split("_", 1)[1]  # Strip the job ID from the filename
+        )
+    else:
+        return {"status": "not_found", "message": "Requested video not found or still processing"}
+
+@app.get("/status/{job_id}")
+async def check_status(job_id: str):
+    """
+    Checks the status of a video generation job
+    """
+    # Look for output files that start with the job ID
+    result_files = [f for f in os.listdir(OUTPUT_DIR) if f.startswith(job_id)]
+
+    if result_files:
+        return {
+            "status": "completed",
+            "job_id": job_id,
+            "files": result_files,
+            "download_urls": [f"/result/{file}" for file in result_files]
+        }
+    else:
+        # Check whether the input image still exists (processing is still running)
+        input_files = [f for f in os.listdir(TEMP_DIR) if f.startswith(job_id)]
+        if input_files:
+            return {"status": "processing", "job_id": job_id}
+        else:
+            return {"status": "not_found", "job_id": job_id}
+
+if __name__ == "__main__":
+    uvicorn.run(app, host="0.0.0.0", port=8000)
\ No newline at end of file
diff --git a/n8n_workflow.json b/n8n_workflow.json
new file mode 100644
index 0000000..19162a3
--- /dev/null
+++ b/n8n_workflow.json
@@ -0,0 +1,207 @@
+{
+  "nodes": [
+    {
+      "parameters": {
+        "httpMethod": "POST",
+        "path": "generate-video",
+        "options": {}
+      },
+      "name": "Webhook",
+      "type": "n8n-nodes-base.webhook",
+      "typeVersion": 1,
+      "position": [
+        250,
+        300
+      ]
+    },
+    {
+      "parameters": {
+        "url": "=http://{{$node[\"Webhook\"].json[\"server_ip\"]}}/generate/",
+        "options": {
+          "formData": {
+            "values": {
+              "image": {
+                "property": "=data:application/octet-stream;base64,{{$node[\"Webhook\"].json[\"image_base64\"]}}",
+                "fileName": "=input.jpg"
+              },
+              "prompt": "={{$node[\"Webhook\"].json[\"prompt\"]}}",
+              "n_prompt": "={{$node[\"Webhook\"].json[\"n_prompt\"]}}",
+              "seed": "={{$node[\"Webhook\"].json[\"seed\"]}}",
+              "length": "={{$node[\"Webhook\"].json[\"length\"]}}",
+              "steps": "={{$node[\"Webhook\"].json[\"steps\"]}}",
+              "use_teacache": "={{$node[\"Webhook\"].json[\"use_teacache\"]}}",
+              "output_filename": "={{$node[\"Webhook\"].json[\"output_filename\"]}}"
+            }
+          }
+        }
+      },
+      "name": "HTTP Request",
+      "type": "n8n-nodes-base.httpRequest",
+      "typeVersion": 4.1,
+      "position": [
+        500,
+        300
+      ]
+    },
+    {
+      "parameters": {
+        "rule": {
+          "interval": [
+            {
+              "field": "seconds",
+              "secondsInterval": 10
+            }
+          ]
+        }
+      },
+      "name": "Polling Trigger",
+      "type": "n8n-nodes-base.scheduleTrigger",
+      "typeVersion": 1.1,
+      "position": [
+        250,
+        500
+      ]
+    },
+    {
+      "parameters": {
+        "url": "=http://{{$node[\"Webhook\"].json[\"server_ip\"]}}/status/{{$node[\"HTTP Request\"].json[\"job_id\"]}}",
+        "options": {}
+      },
+      "name": "Check Status",
+      "type": "n8n-nodes-base.httpRequest",
+      "typeVersion": 4.1,
+      "position": [
+        500,
+        500
+      ]
+    },
+    {
+      "parameters": {
+        "conditions": {
+          "string": [
+            {
+              "value1": "={{$node[\"Check Status\"].json[\"status\"]}}",
+              "operation": "equals",
+              "value2": "completed"
+            }
+          ]
+        }
+      },
+      "name": "IF",
+      "type": "n8n-nodes-base.if",
+      "typeVersion": 1,
+      "position": [
+        700,
+        500
+      ]
+    },
+    {
+      "parameters": {
+        "url": "=http://{{$node[\"Webhook\"].json[\"server_ip\"]}}/result/{{$node[\"HTTP Request\"].json[\"job_id\"]}}_{{$node[\"Webhook\"].json[\"output_filename\"]}}",
+        "options": {
+          "response": {
+            "response": {
+              "fullResponse": true,
+              "responseFormat": "file"
+            }
+          }
+        }
+      },
+      "name": "Download Result",
+      "type": "n8n-nodes-base.httpRequest",
+      "typeVersion": 4.1,
+      "position": [
+        900,
+        450
+      ]
+    },
+    {
+      "parameters": {
+        "values": {
+          "string": [
+            {
+              "name": "jobId",
+              "value": "={{$node[\"HTTP Request\"].json[\"job_id\"]}}"
+            },
+            {
+              "name": "status",
+              "value": "={{$node[\"Check Status\"].json[\"status\"]}}"
+            }
+          ]
+        },
+        "options": {}
+      },
+      "name": "Continue Polling",
+      "type": "n8n-nodes-base.set",
+      "typeVersion": 2,
+      "position": [
+        900,
+        550
+      ]
+    }
+  ],
+  "connections": {
+    "Webhook": {
+      "main": [
+        [
+          {
+            "node": "HTTP Request",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "HTTP Request": {
+      "main": [
+        [
+          {
+            "node": "Polling Trigger",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "Polling Trigger": {
+      "main": [
+        [
+          {
+            "node": "Check Status",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "Check Status": {
+      "main": [
+        [
+          {
+            "node": "IF",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    },
+    "IF": {
+      "main": [
+        [
+          {
+            "node": "Download Result",
+            "type": "main",
+            "index": 0
+          }
+        ],
+        [
+          {
+            "node": "Continue Polling",
+            "type": "main",
+            "index": 0
+          }
+        ]
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/start_api_server.bat b/start_api_server.bat
new file mode 100644
index 0000000..9672122
--- /dev/null
+++ b/start_api_server.bat
@@ -0,0 +1,12 @@
+@echo off
+
+call environment.bat
+
+cd %~dp0webui
+
+"%DIR%\python\python.exe" -m pip install fastapi uvicorn python-multipart
+
+"%DIR%\python\python.exe" api_server.py
+
+:done
+pause
\ No newline at end of file
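
For reference, a minimal Python client sketch that exercises the API the way the commit message describes: the image is uploaded via POST as a binary multipart file, the remaining parameters go along as form fields, and the job is then polled and downloaded. This sketch is not part of the patch; it assumes the server from api_server.py is reachable at http://localhost:8000, that the requests package is installed, and that an input.jpg exists next to the script (all placeholder names).

# client_example.py - hypothetical client sketch, not included in the patch above.
# Assumes api_server.py is running and reachable at BASE_URL.
import time
import requests

BASE_URL = "http://localhost:8000"  # adjust to the machine running api_server.py

# Submit the job: the image goes up as a binary multipart upload,
# everything else as ordinary form fields (matching the /generate/ endpoint).
with open("input.jpg", "rb") as f:
    resp = requests.post(
        f"{BASE_URL}/generate/",
        files={"image": ("input.jpg", f, "image/jpeg")},
        data={
            "prompt": "a short example prompt",
            "n_prompt": "",
            "seed": "31337",
            "length": "5.0",
            "steps": "25",
            "use_teacache": "true",
            "output_filename": "output.mp4",
        },
        timeout=60,
    )
resp.raise_for_status()
job = resp.json()
print("Job started:", job["job_id"])

# Poll /status/{job_id} until the server reports the job as completed.
while True:
    status = requests.get(f"{BASE_URL}/status/{job['job_id']}", timeout=30).json()
    if status["status"] == "completed":
        break
    if status["status"] == "not_found":
        raise RuntimeError("Job not found on the server")
    time.sleep(10)

# Download the finished video via the result_url returned by /generate/.
video = requests.get(f"{BASE_URL}{job['result_url']}", timeout=300)
video.raise_for_status()
with open("output.mp4", "wb") as out:
    out.write(video.content)
print("Saved output.mp4")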