nami0342 committed
Commit 7ba2aec · 1 Parent(s): 50da29c

Support zeroGPU env

Files changed (1): app.py (+51 -32)
app.py CHANGED
@@ -30,6 +30,10 @@ from torchvision.transforms.functional import to_pil_image
 # import pillow_heif  # for HEIC image handling (iPhone photo format)
 from urllib.parse import urlparse
 
+
+# ZeroGPU environment: controls whether torch.compile is used
+is_compile_for_zeroGPU = True  # True: skip torch.compile (ZeroGPU), False: apply torch.compile
+
 # Suppress SSL warnings
 warnings.filterwarnings("ignore", message=".*OpenSSL.*")
 warnings.filterwarnings("ignore", category=UserWarning, module="urllib3")
@@ -68,14 +72,17 @@ unet = UNet2DConditionModel.from_pretrained(
 unet.requires_grad_(False)
 # Apply torch.compile() - 20-40% faster inference (PyTorch 2.0+)
 # Note: the first inference may be slow because of compilation
-if hasattr(torch, 'compile'):
-    try:
-        unet = torch.compile(unet, mode="reduce-overhead")
-        print("✓ UNet model loaded and compiled successfully")
-    except Exception as e:
-        print(f"✓ UNet model loaded (compile skipped: {e})")
-else:
+if is_compile_for_zeroGPU == True:
     print("✓ UNet model loaded successfully")
+else:
+    if hasattr(torch, 'compile'):
+        try:
+            unet = torch.compile(unet, mode="reduce-overhead")
+            print("✓ UNet model loaded and compiled successfully")
+        except Exception as e:
+            print(f"✓ UNet model loaded (compile skipped: {e})")
+    else:
+        print("✓ UNet model loaded successfully")
 
 print("\n[2/10] Loading tokenizers...")
 tokenizer_one = AutoTokenizer.from_pretrained(
@@ -123,14 +130,17 @@ vae = AutoencoderKL.from_pretrained(base_path,
     torch_dtype=torch.float16,
 )
 # Apply torch.compile() - faster VAE encoding/decoding
-if hasattr(torch, 'compile'):
-    try:
-        vae = torch.compile(vae, mode="reduce-overhead")
-        print("✓ VAE loaded and compiled successfully")
-    except Exception as e:
-        print(f"✓ VAE loaded (compile skipped: {e})")
-else:
+if is_compile_for_zeroGPU == True:
     print("✓ VAE loaded successfully")
+else:
+    if hasattr(torch, 'compile'):
+        try:
+            vae = torch.compile(vae, mode="reduce-overhead")
+            print("✓ VAE loaded and compiled successfully")
+        except Exception as e:
+            print(f"✓ VAE loaded (compile skipped: {e})")
+    else:
+        print("✓ VAE loaded successfully")
 
 print("\n[7/10] Loading UNet Encoder...")
 UNet_Encoder = UNet2DConditionModel_ref.from_pretrained(
@@ -139,14 +149,17 @@ UNet_Encoder = UNet2DConditionModel_ref.from_pretrained(
     torch_dtype=torch.float16,
 )
 # Apply torch.compile() - faster UNet Encoder
-if hasattr(torch, 'compile'):
-    try:
-        UNet_Encoder = torch.compile(UNet_Encoder, mode="reduce-overhead")
-        print("✓ UNet Encoder loaded and compiled successfully")
-    except Exception as e:
-        print(f"✓ UNet Encoder loaded (compile skipped: {e})")
-else:
+if is_compile_for_zeroGPU == True:
     print("✓ UNet Encoder loaded successfully")
+else:
+    if hasattr(torch, 'compile'):
+        try:
+            UNet_Encoder = torch.compile(UNet_Encoder, mode="reduce-overhead")
+            print("✓ UNet Encoder loaded and compiled successfully")
+        except Exception as e:
+            print(f"✓ UNet Encoder loaded (compile skipped: {e})")
+    else:
+        print("✓ UNet Encoder loaded successfully")
 
 print("\n[8/10] Initializing parsing and openpose models...")
 parsing_model = Parsing(0)
@@ -243,12 +256,15 @@ print("=" * 60 + "\n")
 
 # Fall back to eager mode when torch.compile errors out
 # Works around compatibility issues with the custom UNet forward method
-try:
-    import torch._dynamo
-    torch._dynamo.config.suppress_errors = True
-    print("✓ torch._dynamo.config.suppress_errors enabled (fallback to eager mode on error)")
-except Exception as e:
-    print(f"⚠ torch._dynamo config not available: {e}")
+if is_compile_for_zeroGPU == True:
+    print("✓ torch.compile is disabled for ZeroGPU")
+else:
+    try:
+        import torch._dynamo
+        torch._dynamo.config.suppress_errors = True
+        print("✓ torch._dynamo.config.suppress_errors enabled (fallback to eager mode on error)")
+    except Exception as e:
+        print(f"⚠ torch._dynamo config not available: {e}")
 
 # GPU warm-up function (runs automatically on app load)
 # Loads the Text Encoder and VAE onto the GPU and initializes CUDA kernels
@@ -752,11 +768,14 @@ with image_blocks as demo:
 warmup_status = gr.Textbox(visible=False)
 
 # Run GPU warm-up automatically on app load (first torch.compile pass)
-demo.load(
-    fn=warmup_gpu,
-    inputs=None,
-    outputs=warmup_status,
-)
+if is_compile_for_zeroGPU == True:
+    print("✓ GPU warm-up is disabled for ZeroGPU")
+else:
+    demo.load(
+        fn=warmup_gpu,
+        inputs=None,
+        outputs=warmup_status,
+    )
 
 print("✓ Gradio interface components created")
 print("✓ Event handlers configured")
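The same skip-or-compile branch is repeated verbatim for the UNet, the VAE, and the UNet Encoder, with is_compile_for_zeroGPU hard-coded to True, presumably because on ZeroGPU the GPU is only attached while a @spaces.GPU-decorated handler runs, so load-time compilation and warm-up would not see the device. Below is a minimal sketch of how the pattern could be centralized; the maybe_compile helper is hypothetical (not part of this commit), and detecting ZeroGPU via the SPACES_ZERO_GPU environment variable is an assumption about the runtime rather than something the diff does.

import os
import torch

# Assumption: ZeroGPU Spaces expose the SPACES_ZERO_GPU environment variable;
# the commit itself simply hard-codes is_compile_for_zeroGPU = True.
is_compile_for_zeroGPU = bool(os.environ.get("SPACES_ZERO_GPU"))

def maybe_compile(model, label):
    """Apply torch.compile when available and allowed; otherwise return the model unchanged."""
    if is_compile_for_zeroGPU or not hasattr(torch, "compile"):
        print(f"✓ {label} loaded successfully")
        return model
    try:
        compiled = torch.compile(model, mode="reduce-overhead")
        print(f"✓ {label} loaded and compiled successfully")
        return compiled
    except Exception as e:
        print(f"✓ {label} loaded (compile skipped: {e})")
        return model

# Usage mirroring the three blocks in this diff:
# unet = maybe_compile(unet, "UNet model")
# vae = maybe_compile(vae, "VAE")
# UNet_Encoder = maybe_compile(UNet_Encoder, "UNet Encoder")

Note that torch.compile returns a wrapped module immediately and only traces on the first call, so most compile failures surface at first inference rather than at load time; that is why the non-ZeroGPU path also keeps torch._dynamo.config.suppress_errors as an eager-mode fallback.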