Update app.py
app.py (CHANGED)

@@ -211,10 +211,7 @@ def edit_inference(net, prompt, negative_prompt, guidance_scale, ddim_steps, see
 mean.to(device)
 std.to(device)
 v.to(device)
-
-pointy.to(device)
-wavy.to(device)
-thick.to(device)
+


 weights = torch.load(net).to(device)
@@ -235,9 +232,9 @@ def edit_inference(net, prompt, negative_prompt, guidance_scale, ddim_steps, see
 pcs_edits = young.shape[1]
 padding = torch.zeros((1,pcs_original-pcs_edits)).to(device)
 young_pad = torch.cat((young.to(device), padding), 1)
-pointy_pad = torch.cat((pointy, padding), 1)
-wavy_pad = torch.cat((wavy, padding), 1)
-thick_pad = torch.cat((thick, padding), 1)
+pointy_pad = torch.cat((pointy.to(device), padding), 1)
+wavy_pad = torch.cat((wavy.to(device), padding), 1)
+thick_pad = torch.cat((thick.to(device), padding), 1)


 edited_weights = weights+a1*1e6*young_pad+a2*1e6*pointy_pad+a3*1e6*wavy_pad+a4*2e6*thick_pad
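Context on why the `.to(device)` calls moved: `torch.Tensor.to()` is not in-place, it returns a new tensor and leaves the original untouched, so the bare `pointy.to(device)` / `wavy.to(device)` / `thick.to(device)` statements removed above never actually moved those tensors. A minimal sketch of the difference, with illustrative shapes and a generic `device` setup not taken from app.py:

```python
import torch

device = "cuda" if torch.cuda.is_available() else "cpu"

# Stand-ins for the loaded edit directions; shapes are illustrative only.
pointy = torch.zeros(1, 10)                  # starts on CPU
padding = torch.zeros(1, 2, device=device)

pointy.to(device)                            # returns a new tensor that is discarded
print(pointy.device)                         # still cpu: the bare call did nothing

# The fix: apply .to(device) where the result is actually consumed.
pointy_pad = torch.cat((pointy.to(device), padding), 1)
print(pointy_pad.shape, pointy_pad.device)   # torch.Size([1, 12]) on `device`
```

Moving the tensor at the point of use, as the diff now does, keeps all inputs to `torch.cat` on the same device, which would otherwise raise a device-mismatch error when `device` is a GPU.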