# NOTE(review): the text "Spaces: / Paused / Paused" captured here is residual
# hosting-page status (Hugging Face Spaces), not part of the program.
# Stdlib imports first, then third-party, then app construction.
import os
import subprocess

import requests
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()
class RigIn(BaseModel):
    """Request body for the rigging endpoint."""

    # URL of the input mesh file to download and rig (obj, glb, fbx, etc.)
    mesh_url: str
def root():
    """Liveness check: report that the Puppeteer API is up.

    Returns:
        dict: a constant ready message.

    NOTE(review): a FastAPI route decorator (e.g. ``@app.get("/")``) appears
    to have been lost when this file was captured — confirm against the
    original source before deploying.
    """
    return {"message": "Puppeteer API (GPU) ready"}
def health():
    """Report service health plus CUDA availability.

    Returns:
        dict: always ``{"status": "ok", ...}``. When torch imports cleanly,
        ``"cuda"`` is the availability flag and ``"gpu"`` the device name
        (``None`` without a GPU). When torch is missing or raises, the error
        text is returned under ``"detail"`` instead of failing the endpoint.

    NOTE(review): a route decorator (e.g. ``@app.get("/health")``) appears
    to have been lost in this capture — confirm.
    """
    try:
        # Imported lazily so the endpoint still answers when torch is absent
        # or broken on this machine.
        import torch

        gpu = torch.cuda.is_available()
        name = torch.cuda.get_device_name(0) if gpu else None
        return {"status": "ok", "cuda": gpu, "gpu": name}
    except Exception as e:
        return {"status": "ok", "cuda": False, "detail": str(e)}
def rig(inp: RigIn):
    """Download a mesh, run the Puppeteer rigging script on it, list results.

    Args:
        inp: request body carrying ``mesh_url``, the mesh file to download.

    Returns:
        dict: ``{"status": "ok", "result_dir": ..., "files_preview": [...]}``
        on success (up to 10 result paths), or
        ``{"status": "error", "detail": ...}`` if the rigging script fails.

    NOTE(review): a route decorator (e.g. ``@app.post("/rig")``) appears to
    have been lost in this capture — confirm against the original source.
    """
    os.makedirs("/tmp/in", exist_ok=True)
    # NOTE(review): basename of a raw URL keeps any query string; this
    # assumes clean file URLs like .../model.glb — confirm with the caller.
    mesh_path = os.path.join("/tmp/in", os.path.basename(inp.mesh_url))

    # 1. Download the input file, streamed in chunks so large meshes are not
    # held in memory. The timeout keeps a dead/slow server from hanging the
    # endpoint forever (the original had no timeout).
    with requests.get(inp.mesh_url, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(mesh_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)

    # 2. Run the Puppeteer rigging script. Argument-list form (shell=False)
    # keeps the URL-derived path from being interpreted by a shell.
    workdir = "/app/Puppeteer"
    cmd = ["bash", "demo_rigging.sh", mesh_path]
    try:
        subprocess.run(cmd, cwd=workdir, check=True)
    except subprocess.CalledProcessError as e:
        return {"status": "error", "detail": str(e)}

    # 3. Collect up to 20 result paths. The original `break` only exited the
    # inner filename loop, so os.walk kept scanning every remaining directory
    # after the cap was hit; break out of both levels instead.
    result_dir = os.path.join(workdir, "results")
    files = []
    for rootdir, _, filenames in os.walk(result_dir):
        for fn in filenames:
            files.append(os.path.join(rootdir, fn))
            if len(files) >= 20:
                break
        if len(files) >= 20:
            break
    return {"status": "ok", "result_dir": result_dir, "files_preview": files[:10]}