import os, subprocess, requests
from fastapi import FastAPI
from pydantic import BaseModel
app = FastAPI()
class RigIn(BaseModel):
    mesh_url: str  # input file URL (obj, glb, fbx, etc.)
@app.get("/")
def root():
    return {"message": "Puppeteer API (GPU) ready"}
@app.get("/health")
def health():
    try:
        import torch
        gpu = torch.cuda.is_available()
        name = torch.cuda.get_device_name(0) if gpu else None
        return {"status": "ok", "cuda": gpu, "gpu": name}
    except Exception as e:
        return {"status": "ok", "cuda": False, "detail": str(e)}
@app.post("/rig")
def rig(inp: RigIn):
    os.makedirs("/tmp/in", exist_ok=True)
    mesh_path = os.path.join("/tmp/in", os.path.basename(inp.mesh_url))

    # 1️⃣ Download the input file
    with requests.get(inp.mesh_url, stream=True) as r:
        r.raise_for_status()
        with open(mesh_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:
                    f.write(chunk)

    # 2️⃣ Run Puppeteer
    workdir = "/app/Puppeteer"
    cmd = ["bash", "demo_rigging.sh", mesh_path]
    try:
        subprocess.run(cmd, cwd=workdir, check=True)
    except subprocess.CalledProcessError as e:
        return {"status": "error", "detail": str(e)}

    # 3️⃣ Return a listing of the result files
    result_dir = os.path.join(workdir, "results")
    files = []
    for rootdir, _, filenames in os.walk(result_dir):
        for fn in filenames:
            files.append(os.path.join(rootdir, fn))
            if len(files) >= 20:
                break
        if len(files) >= 20:
            break
    return {"status": "ok", "result_dir": result_dir, "files_preview": files[:10]}