# --- Patch metadata (reconstructed from the original git format-patch) ---
# commit c875e2b1854d0127282475d0ec99e37d3e77b617
# Author: sascha
# Date:   Wed, 22 Apr 2026 18:27:55 +0200
# Subject: [PATCH] feat: VM lifecycle, TTS, inventory endpoints
#
# The original hunk (a) registers two new backends in the existing SERVICES
# proxy registry and (b) adds VM lifecycle / inventory / Ansible / TTS
# endpoints to app.py.  Pre-existing context lines shown by the diff
# (vault_reload, the head of SERVICES) are part of app.py already and are
# not repeated here.

# New backends for the generic /{service}/{path:path} proxy.  SERVICES is the
# registry dict defined earlier in app.py (its "uptime" and "waha" entries are
# visible as diff context).
SERVICES.update({
    "forgejo": {"url": "http://10.4.1.116:3001", "auth": "bearer",
                "key_file": "forgejo", "vault_key": "forgejo_token"},
    "semaphore": {"url": "http://10.4.1.116:8090", "auth": "bearer",
                  "key_file": "semaphore", "vault_key": "semaphore_token"},
})


# --- VM Lifecycle Endpoints ---
from pydantic import BaseModel
import shlex
import subprocess as _sp

AUTOMATION1 = "sascha@10.5.85.5"  # automation host: runs iso-builder + ansible
ISO_BUILDER = "/app-config/ansible/iso-builder/build-iso.sh"
ANSIBLE_DIR = "/app-config/ansible"
INVENTORY_INI = f"{ANSIBLE_DIR}/pfannkuchen.ini"
# Proxmox API base URL, hoisted so every /vm/* endpoint stays consistent.
PVE_API = "https://10.5.85.11:8006/api2/json"


class VMCreate(BaseModel):
    """Request body for POST /vm/create.

    ``group`` is new and backward-compatible: it defaults to "auto", which is
    the inventory group the original implementation always used.
    """
    node: int
    ip: str
    hostname: str
    cores: int = 2
    memory: int = 4096   # MiB
    disk: int = 32       # GiB
    group: str = "auto"  # Ansible inventory group for the new host


def _ssh(host, cmd, timeout=600):
    """Run *cmd* on *host* via ssh; return (returncode, stdout, stderr).

    Blocking (subprocess.run) -- async endpoints must go through _assh()
    so the event loop is not stalled while ssh runs.
    """
    r = _sp.run(
        ["ssh", "-o", "ConnectTimeout=10",
         "-o", "StrictHostKeyChecking=accept-new", host, cmd],
        capture_output=True, text=True, timeout=timeout)
    return r.returncode, r.stdout, r.stderr


async def _assh(host, cmd, timeout=600):
    """Async wrapper for _ssh(): runs the blocking call in a worker thread."""
    return await asyncio.to_thread(_ssh, host, cmd, timeout)


def _pve_auth():
    """Build the Proxmox 'Authorization' header value from vault credentials."""
    pv = _parse_kv("proxmox")
    return f"PVEAPIToken={pv.get('tokenid', '')}={pv.get('secret', '')}"


def _inventory_add_cmd(ini, group, name, ip):
    """Build the remote command that idempotently inserts
    ``<name> ansible_host=<ip>`` under ``[<group>]`` in *ini*.

    The values are passed to the remote interpreter via sys.argv (not
    interpolated into the script source) and the outer shell arguments are
    quoted, so user-supplied hostnames/IPs cannot inject shell or Python code.
    Prints 'already exists' or 'added to [<group>]' like the original.
    """
    script = (
        "import sys\n"
        "ini, group, name, ip = sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]\n"
        "lines = open(ini).readlines()\n"
        "if any((name + ' ') in l or (name + '\\n') in l for l in lines):\n"
        "    print('already exists')\n"
        "else:\n"
        "    out, found = [], False\n"
        "    for l in lines:\n"
        "        out.append(l)\n"
        "        if l.strip() == '[' + group + ']':\n"
        "            found = True\n"
        "        elif found and (l.startswith('[') or l.strip() == ''):\n"
        "            out.insert(-1, name + ' ansible_host=' + ip + '\\n')\n"
        "            found = False\n"
        "    if found:\n"  # group section was last in the file
        "        out.append(name + ' ansible_host=' + ip + '\\n')\n"
        "    open(ini, 'w').writelines(out)\n"
        "    print('added to [' + group + ']')\n"
    )
    args = " ".join(shlex.quote(a) for a in (ini, group, name, ip))
    return f"python3 -c {shlex.quote(script)} {args}"


def _host_vars_cmd(name, ip, user="sascha"):
    """Build the remote command that writes host_vars/<name>/vars.yml."""
    d = f"{ANSIBLE_DIR}/host_vars/{name}"
    return (f"mkdir -p {shlex.quote(d)} && "
            f"printf 'ansible_host: %s\\nansible_user: %s\\n' "
            f"{shlex.quote(ip)} {shlex.quote(user)} > "
            f"{shlex.quote(d + '/vars.yml')}")


@app.get("/vm/list")
async def vm_list(_=Depends(_verify)):
    """List every QEMU VM on every Proxmox node, annotated with its node name."""
    auth = _pve_auth()
    vms = []
    # verify=False: Proxmox serves a self-signed certificate on the LAN.
    async with httpx.AsyncClient(verify=False, timeout=15) as c:
        nodes = await c.get(f"{PVE_API}/nodes", headers={"Authorization": auth})
        for n in nodes.json().get("data", []):
            r = await c.get(f"{PVE_API}/nodes/{n['node']}/qemu",
                            headers={"Authorization": auth})
            for vm in r.json().get("data", []):
                vm["node"] = n["node"]
                vms.append(vm)
    return vms


@app.post("/vm/create")
async def vm_create(req: VMCreate, _=Depends(_verify)):
    """Create a VM end-to-end: build ISO + VM, wait for SSH, register the host
    in the Ansible inventory, then run the base setup playbook.

    Returns {"status": "ok"|"partial", ..., "steps": [...]}; errors are
    500 (iso-builder failed) or 504 (SSH never became reachable).
    """
    steps = []

    # Step 1: build ISO + create the VM via iso-builder on automation1.
    # User-supplied values are shell-quoted (they travel through a remote
    # shell).  NOTE(review): the install password is hard-coded exactly as in
    # the original -- it should be read from the vault instead; confirm & fix.
    build = (f"{ISO_BUILDER} --node {int(req.node)} --ip {shlex.quote(req.ip)}"
             f" --hostname {shlex.quote(req.hostname)} --cores {int(req.cores)}"
             f" --memory {int(req.memory)} --disk {int(req.disk)}"
             f" --password 'GT500r8' --create-vm")
    rc, out, err = await _assh(AUTOMATION1,
                               f"cd {ANSIBLE_DIR}/iso-builder && {build}",
                               timeout=300)
    if rc != 0:
        return JSONResponse({"error": "iso-builder failed",
                             "stderr": err[-500:], "stdout": out[-500:]},
                            status_code=500)
    steps.append("iso-builder: ok")

    # Step 2: poll for SSH reachability (36 attempts x 10 s = up to 6 min).
    reachable = False
    for _attempt in range(36):
        try:
            rc2, out2, _e = await _assh(f"sascha@{req.ip}", "hostname",
                                        timeout=10)
            if rc2 == 0:
                reachable = True
                steps.append(f"ssh: {out2.strip()} reachable")
                break
        except Exception:
            pass  # ssh may time out while the installer is still running
        await asyncio.sleep(10)
    if not reachable:
        return JSONResponse({"error": "SSH timeout", "steps": steps},
                            status_code=504)

    # Step 2.5: register in the Ansible inventory + host_vars, then refresh
    # known_hosts so the setup playbook can connect cleanly.
    await _assh(AUTOMATION1,
                _inventory_add_cmd(INVENTORY_INI, req.group,
                                   req.hostname, req.ip),
                timeout=30)
    await _assh(AUTOMATION1, _host_vars_cmd(req.hostname, req.ip), timeout=30)
    ipq = shlex.quote(req.ip)
    await _assh(AUTOMATION1,
                f"ssh-keygen -f /home/sascha/.ssh/known_hosts -R {ipq} "
                f"2>/dev/null; ssh -o StrictHostKeyChecking=accept-new "
                f"sascha@{ipq} hostname 2>/dev/null",
                timeout=30)
    steps.append("inventory: added")

    # Step 3: run the base setup playbook over direct SSH.
    rc3, _o, _e = await _assh(
        AUTOMATION1,
        f"cd {ANSIBLE_DIR} && bash pfannkuchen.sh setup "
        f"{shlex.quote(req.hostname)}",
        timeout=600)
    steps.append(f"ansible: {'ok' if rc3 == 0 else f'failed (rc={rc3})'}")

    return {"status": "ok" if rc3 == 0 else "partial",
            "hostname": req.hostname, "ip": req.ip, "node": req.node,
            "steps": steps}


@app.get("/vm/status/{vmid}")
async def vm_status(vmid: int, _=Depends(_verify)):
    """Return the current status of VM *vmid*; every node is searched."""
    auth = _pve_auth()
    async with httpx.AsyncClient(verify=False, timeout=10) as c:
        nodes = await c.get(f"{PVE_API}/nodes", headers={"Authorization": auth})
        for n in nodes.json().get("data", []):
            r = await c.get(
                f"{PVE_API}/nodes/{n['node']}/qemu/{vmid}/status/current",
                headers={"Authorization": auth})
            if r.status_code == 200:
                return r.json().get("data", {})
    return JSONResponse({"error": "VM not found"}, status_code=404)


@app.delete("/vm/{vmid}")
async def vm_delete(vmid: int, _=Depends(_verify)):
    """Delete VM *vmid* from whichever node accepts the request first."""
    auth = _pve_auth()
    async with httpx.AsyncClient(verify=False, timeout=30) as c:
        nodes = await c.get(f"{PVE_API}/nodes", headers={"Authorization": auth})
        for n in nodes.json().get("data", []):
            r = await c.delete(f"{PVE_API}/nodes/{n['node']}/qemu/{vmid}",
                               headers={"Authorization": auth})
            if r.status_code == 200:
                return r.json()
    return JSONResponse({"error": "VM not found"}, status_code=404)


@app.post("/inventory/host")
async def inventory_host(request: Request, _=Depends(_verify)):
    """Add a host to pfannkuchen.ini (idempotent) and create its host_vars.

    Body: {"name": ..., "ip": ..., "group": "auto", "user": "sascha"}.
    Missing name/ip now yields a 400 (the original raised KeyError -> 500).
    """
    body = await request.json()
    name, ip = body.get("name"), body.get("ip")
    if not name or not ip:
        return JSONResponse({"error": "name and ip required"}, status_code=400)
    group = body.get("group", "auto")
    user = body.get("user", "sascha")
    rc, out, _e = await _assh(AUTOMATION1,
                              _inventory_add_cmd(INVENTORY_INI, group,
                                                 name, ip),
                              timeout=30)
    await _assh(AUTOMATION1, _host_vars_cmd(name, ip, user), timeout=30)
    return {"status": "ok", "name": name, "ip": ip, "group": group,
            "result": out.strip()}


@app.post("/ansible/run")
async def ansible_run(request: Request, _=Depends(_verify)):
    """Run the base setup playbook for one host via direct SSH.

    Body: {"limit": hostname} ("hostname" accepted as an alias; a
    "template_id" key is tolerated for backward compatibility but unused in
    direct-SSH mode).
    """
    body = await request.json()
    hostname = body.get("limit", body.get("hostname", ""))
    if not hostname:
        return JSONResponse({"error": "limit/hostname required"},
                            status_code=400)
    rc, out, _e = await _assh(
        AUTOMATION1,
        f"cd {ANSIBLE_DIR} && bash pfannkuchen.sh setup "
        f"{shlex.quote(hostname)}",
        timeout=600)
    return {"status": "ok" if rc == 0 else "error", "rc": rc,
            "output": out[-1000:]}


@app.get("/ansible/status/{job_id}")
async def ansible_status(job_id: int, _=Depends(_verify)):
    """Stub: direct-SSH mode runs synchronously, so there are no async jobs."""
    return {"info": "direct SSH mode - no async job tracking"}


# --- TTS Endpoints ---

class TTSRequest(BaseModel):
    """Request body for POST /tts/speak."""
    text: str
    target: str = "speaker"          # "speaker" or "telegram"
    voice: str = "deep_thought.mp3"  # Chatterbox reference-voice file
    language: str = "de"

SPEAKER_URL = "http://10.10.1.166:10800"
CHATTERBOX_URL = "http://10.2.1.104:8004/tts"
CHATTERBOX_BASE = "http://10.2.1.104:8004"
HERMES = "sascha@10.4.1.100"  # VM where the Telegram bot picks up voice files


@app.post("/tts/speak")
async def tts_speak(req: TTSRequest, _=Depends(_verify)):
    """Speak *req.text* on the LAN speaker or produce a Telegram voice note.

    target == "telegram": synthesize WAV via Chatterbox, copy it to hermes,
    transcode to OGG/Opus there; the bot then sends /tmp/trulla_voice.ogg.
    Returns 400 for an unknown target, 500 when Chatterbox fails, 502 when
    the copy to hermes fails.
    """
    if req.target == "speaker":
        async with httpx.AsyncClient(verify=False, timeout=120) as c:
            r = await c.post(SPEAKER_URL, json={"text": req.text})
        return {"status": "ok" if r.status_code == 200 else "error",
                "target": "speaker"}

    if req.target == "telegram":
        async with httpx.AsyncClient(verify=False, timeout=120) as c:
            r = await c.post(CHATTERBOX_URL, json={
                "text": req.text, "voice_mode": "clone",
                "reference_audio_filename": req.voice,
                "output_format": "wav", "language": req.language,
                "exaggeration": 0.3, "cfg_weight": 0.7, "temperature": 0.6,
            })
        if r.status_code != 200:
            return JSONResponse({"error": "chatterbox failed"},
                                status_code=500)

        import tempfile
        ogg_path = "/tmp/trulla_voice.ogg"
        # NamedTemporaryFile(delete=False) replaces the deprecated/insecure
        # tempfile.mktemp(); the finally block guarantees the local WAV is
        # removed even when scp/ffmpeg fail (the original leaked it).
        tmp = tempfile.NamedTemporaryFile(suffix=".wav", delete=False)
        wav_path = tmp.name
        try:
            tmp.write(r.content)
            tmp.close()
            # _ssh/subprocess.run are blocking -> run them off the event loop.
            await asyncio.to_thread(_ssh, HERMES, f"rm -f {ogg_path}", 10)
            scp = await asyncio.to_thread(
                _sp.run,
                ["scp", "-o", "ConnectTimeout=5", wav_path,
                 f"{HERMES}:/tmp/trulla_voice.wav"],
                timeout=30)
            if scp.returncode != 0:
                # Original silently returned "ok" with a dead media path here.
                return JSONResponse({"error": "scp to hermes failed"},
                                    status_code=502)
            await asyncio.to_thread(
                _ssh, HERMES,
                f"ffmpeg -y -i /tmp/trulla_voice.wav -c:a libopus -b:a 64k "
                f"{ogg_path} 2>/dev/null",
                30)
        finally:
            os.unlink(wav_path)
        return {"status": "ok", "target": "telegram", "media_path": ogg_path,
                "hint": "Use MEDIA:/tmp/trulla_voice.ogg in response"}

    return JSONResponse({"error": f"unknown target: {req.target}"},
                        status_code=400)


@app.get("/tts/voices")
async def tts_voices(_=Depends(_verify)):
    """Proxy Chatterbox's predefined-voices listing."""
    async with httpx.AsyncClient(verify=False, timeout=10) as c:
        r = await c.get(f"{CHATTERBOX_BASE}/get_predefined_voices")
    return r.json()


@app.get("/tts/health")
async def tts_health(_=Depends(_verify)):
    """Probe the speaker daemon and Chatterbox.

    Never raises: each backend reports either its payload / "ok" or an
    offline marker with the underlying error.
    """
    results = {}
    async with httpx.AsyncClient(verify=False, timeout=5) as c:
        try:
            r = await c.get(SPEAKER_URL)
            results["speaker"] = r.json()
        except Exception as e:
            results["speaker"] = {"status": "offline", "error": str(e)}
        try:
            r = await c.get(f"{CHATTERBOX_BASE}/api/model-info")
            # Original reported "ok" unconditionally; gate on the HTTP status.
            results["chatterbox"] = ("ok" if r.status_code == 200
                                     else {"status": "error",
                                           "code": r.status_code})
        except Exception as e:
            results["chatterbox"] = {"status": "offline", "error": str(e)}
    return results


# NOTE(review): the original patch additionally inserts this guard at the top
# of the pre-existing catch-all proxy() route, so the dedicated endpoints
# above are never shadowed as proxied "services".  The remainder of proxy()
# lives outside this hunk (only its head and tail are visible as diff
# context), so it is reproduced here verbatim rather than rewritten; hoisting
# the set to module level would avoid rebuilding it on every request:
#
#     SKIP_SERVICES = {"vm", "inventory", "ansible", "debug", "tts"}
#     if service in SKIP_SERVICES:
#         raise HTTPException(404, f"Unknown service: {service}")