Spaces:
Runtime error
Runtime error
Fix the checkpoint name errors. The checkpoint files themselves are unchanged; only the names are corrected to avoid misunderstanding.
Browse files- app.py +5 -9
- checkpoints/{laion10M_epoch_6_model_wo_ema.ckpt → laion10M_epoch_6_model_ema_only.ckpt} +0 -0
- checkpoints/{textcaps5K_epoch_10_model_wo_ema.ckpt → textcaps5K_epoch_10_model_ema_only.ckpt} +0 -0
- checkpoints/{textcaps5K_epoch_20_model_wo_ema.ckpt → textcaps5K_epoch_20_model_ema_only.ckpt} +0 -0
- checkpoints/{textcaps5K_epoch_40_model_wo_ema.ckpt → textcaps5K_epoch_40_model_ema_only.ckpt} +0 -0
app.py
CHANGED
|
@@ -86,18 +86,14 @@ def load_ckpt(model_ckpt = "LAION-Glyph-10M-Epoch-5"):
|
|
| 86 |
time.sleep(2)
|
| 87 |
print("empty the cuda cache")
|
| 88 |
|
| 89 |
-
# if model_ckpt == "LAION-Glyph-1M":
|
| 90 |
-
# model = load_model_ckpt(model, "laion1M_model_wo_ema.ckpt")
|
| 91 |
-
# if model_ckpt == "LAION-Glyph-10M-Epoch-5":
|
| 92 |
-
# model = load_model_ckpt(model, "laion10M_epoch_5_model_wo_ema.ckpt")
|
| 93 |
if model_ckpt == "LAION-Glyph-10M-Epoch-6":
|
| 94 |
-
model = load_model_ckpt(model, "checkpoints/laion10M_epoch_6_model_wo_ema.ckpt")
|
| 95 |
elif model_ckpt == "TextCaps-5K-Epoch-10":
|
| 96 |
-
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_10_model_wo_ema.ckpt")
|
| 97 |
elif model_ckpt == "TextCaps-5K-Epoch-20":
|
| 98 |
-
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_20_model_wo_ema.ckpt")
|
| 99 |
elif model_ckpt == "TextCaps-5K-Epoch-40":
|
| 100 |
-
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_40_model_wo_ema.ckpt")
|
| 101 |
|
| 102 |
render_tool = Render_Text(model, save_memory = SAVE_MEMORY)
|
| 103 |
output_str = f"already change the model checkpoint to {model_ckpt}"
|
|
@@ -121,7 +117,7 @@ disable_verbosity()
|
|
| 121 |
if SAVE_MEMORY:
|
| 122 |
enable_sliced_attention()
|
| 123 |
cfg = OmegaConf.load("config.yaml")
|
| 124 |
-
model = load_model_from_config(cfg, "checkpoints/laion10M_epoch_6_model_wo_ema.ckpt", verbose=True)
|
| 125 |
render_tool = Render_Text(model, save_memory = SAVE_MEMORY)
|
| 126 |
|
| 127 |
description = """
|
|
|
|
| 86 |
time.sleep(2)
|
| 87 |
print("empty the cuda cache")
|
| 88 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 89 |
if model_ckpt == "LAION-Glyph-10M-Epoch-6":
|
| 90 |
+
model = load_model_ckpt(model, "checkpoints/laion10M_epoch_6_model_ema_only.ckpt")
|
| 91 |
elif model_ckpt == "TextCaps-5K-Epoch-10":
|
| 92 |
+
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_10_model_ema_only.ckpt")
|
| 93 |
elif model_ckpt == "TextCaps-5K-Epoch-20":
|
| 94 |
+
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_20_model_ema_only.ckpt")
|
| 95 |
elif model_ckpt == "TextCaps-5K-Epoch-40":
|
| 96 |
+
model = load_model_ckpt(model, "checkpoints/textcaps5K_epoch_40_model_ema_only.ckpt")
|
| 97 |
|
| 98 |
render_tool = Render_Text(model, save_memory = SAVE_MEMORY)
|
| 99 |
output_str = f"already change the model checkpoint to {model_ckpt}"
|
|
|
|
| 117 |
if SAVE_MEMORY:
|
| 118 |
enable_sliced_attention()
|
| 119 |
cfg = OmegaConf.load("config.yaml")
|
| 120 |
+
model = load_model_from_config(cfg, "checkpoints/laion10M_epoch_6_model_ema_only.ckpt", verbose=True)
|
| 121 |
render_tool = Render_Text(model, save_memory = SAVE_MEMORY)
|
| 122 |
|
| 123 |
description = """
|
checkpoints/{laion10M_epoch_6_model_wo_ema.ckpt → laion10M_epoch_6_model_ema_only.ckpt}
RENAMED
|
File without changes
|
checkpoints/{textcaps5K_epoch_10_model_wo_ema.ckpt → textcaps5K_epoch_10_model_ema_only.ckpt}
RENAMED
|
File without changes
|
checkpoints/{textcaps5K_epoch_20_model_wo_ema.ckpt → textcaps5K_epoch_20_model_ema_only.ckpt}
RENAMED
|
File without changes
|
checkpoints/{textcaps5K_epoch_40_model_wo_ema.ckpt → textcaps5K_epoch_40_model_ema_only.ckpt}
RENAMED
|
File without changes
|