marcus-castalk committed
Commit 8732e8f · verified · 1 Parent(s): 1181570

Add files using upload-large-folder tool

Files changed (50)
  1. LLM-TTS/checkpoints/finetune_jonathan_llama_xcodec/checkpoint-2984/trainer_state.json +447 -0
  2. LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/config.json +39 -0
  3. LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/generation_config.json +10 -0
  4. LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/model.safetensors.index.json +262 -0
  5. LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/trainer_state.json +447 -0
  6. LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/config.json +39 -0
  7. LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/generation_config.json +10 -0
  8. LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/model.safetensors.index.json +262 -0
  9. LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/trainer_state.json +447 -0
  10. LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/config.json +40 -0
  11. LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/generation_config.json +10 -0
  12. LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/model.safetensors.index.json +261 -0
  13. LLM-TTS/jonathan_ft_data/2288.wav +0 -0
  14. LLM-TTS/jonathan_ft_data/2314.wav +0 -0
  15. LLM-TTS/jonathan_ft_data/2316.wav +0 -0
  16. LLM-TTS/jonathan_ft_data/2379.wav +0 -0
  17. LLM-TTS/jonathan_ft_data/24.wav +0 -0
  18. LLM-TTS/jonathan_ft_data/2410.wav +0 -0
  19. LLM-TTS/jonathan_ft_data/2445.wav +0 -0
  20. LLM-TTS/jonathan_ft_data/2452.wav +0 -0
  21. LLM-TTS/jonathan_ft_data/2464.wav +0 -0
  22. LLM-TTS/jonathan_ft_data/2497.wav +0 -0
  23. LLM-TTS/jonathan_ft_data/2537.wav +0 -0
  24. LLM-TTS/jonathan_ft_data/254.wav +0 -0
  25. LLM-TTS/jonathan_ft_data/2569.wav +0 -0
  26. LLM-TTS/jonathan_ft_data/258.wav +0 -0
  27. LLM-TTS/jonathan_ft_data/2602.wav +0 -0
  28. LLM-TTS/jonathan_ft_data/2624.wav +0 -0
  29. LLM-TTS/jonathan_ft_data/264.wav +0 -0
  30. LLM-TTS/jonathan_ft_data/2648.wav +0 -0
  31. LLM-TTS/jonathan_ft_data/2659.wav +0 -0
  32. LLM-TTS/jonathan_ft_data/2682.wav +0 -0
  33. LLM-TTS/jonathan_ft_data/2704.wav +0 -0
  34. LLM-TTS/jonathan_ft_data/2706.wav +0 -0
  35. LLM-TTS/jonathan_ft_data/2715.wav +0 -0
  36. LLM-TTS/jonathan_ft_data/2720.wav +0 -0
  37. LLM-TTS/jonathan_ft_data/2725.wav +0 -0
  38. LLM-TTS/jonathan_ft_data/2772.wav +0 -0
  39. LLM-TTS/jonathan_ft_data/2787.wav +0 -0
  40. LLM-TTS/jonathan_ft_data/2793.wav +0 -0
  41. LLM-TTS/jonathan_ft_data/2827.wav +0 -0
  42. LLM-TTS/jonathan_ft_data/2870.wav +0 -0
  43. LLM-TTS/jonathan_ft_data/2889.wav +0 -0
  44. LLM-TTS/jonathan_ft_data/2909.wav +0 -0
  45. LLM-TTS/jonathan_ft_data/2911.wav +0 -0
  46. LLM-TTS/jonathan_ft_data/2939.wav +0 -0
  47. LLM-TTS/jonathan_ft_data/317.wav +0 -0
  48. LLM-TTS/jonathan_ft_data/323.wav +0 -0
  49. LLM-TTS/jonathan_ft_data/336.wav +0 -0
  50. LLM-TTS/jonathan_ft_data/342.wav +0 -0
LLM-TTS/checkpoints/finetune_jonathan_llama_xcodec/checkpoint-2984/trainer_state.json ADDED
@@ -0,0 +1,447 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2984,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.01675603217158177,
+ "grad_norm": 15.375,
+ "learning_rate": 4.9178954423592495e-05,
+ "loss": 7.6287,
+ "step": 50
+ },
+ {
+ "epoch": 0.03351206434316354,
+ "grad_norm": 21.125,
+ "learning_rate": 4.834115281501341e-05,
+ "loss": 7.0313,
+ "step": 100
+ },
+ {
+ "epoch": 0.05026809651474531,
+ "grad_norm": 11.5,
+ "learning_rate": 4.750335120643432e-05,
+ "loss": 6.9903,
+ "step": 150
+ },
+ {
+ "epoch": 0.06702412868632708,
+ "grad_norm": 8.125,
+ "learning_rate": 4.666554959785523e-05,
+ "loss": 6.9855,
+ "step": 200
+ },
+ {
+ "epoch": 0.08378016085790885,
+ "grad_norm": 11.5,
+ "learning_rate": 4.582774798927614e-05,
+ "loss": 6.8722,
+ "step": 250
+ },
+ {
+ "epoch": 0.10053619302949061,
+ "grad_norm": 13.125,
+ "learning_rate": 4.4989946380697054e-05,
+ "loss": 6.8653,
+ "step": 300
+ },
+ {
+ "epoch": 0.11729222520107238,
+ "grad_norm": 13.0,
+ "learning_rate": 4.4152144772117966e-05,
+ "loss": 6.837,
+ "step": 350
+ },
+ {
+ "epoch": 0.13404825737265416,
+ "grad_norm": 10.125,
+ "learning_rate": 4.331434316353888e-05,
+ "loss": 6.7308,
+ "step": 400
+ },
+ {
+ "epoch": 0.15080428954423591,
+ "grad_norm": 21.5,
+ "learning_rate": 4.247654155495979e-05,
+ "loss": 6.6909,
+ "step": 450
+ },
+ {
+ "epoch": 0.1675603217158177,
+ "grad_norm": 16.0,
+ "learning_rate": 4.16387399463807e-05,
+ "loss": 6.7413,
+ "step": 500
+ },
+ {
+ "epoch": 0.18431635388739948,
+ "grad_norm": 11.9375,
+ "learning_rate": 4.0800938337801606e-05,
+ "loss": 6.6221,
+ "step": 550
+ },
+ {
+ "epoch": 0.20107238605898123,
+ "grad_norm": 10.625,
+ "learning_rate": 3.9963136729222525e-05,
+ "loss": 6.6998,
+ "step": 600
+ },
+ {
+ "epoch": 0.217828418230563,
+ "grad_norm": 20.0,
+ "learning_rate": 3.912533512064344e-05,
+ "loss": 6.5853,
+ "step": 650
+ },
+ {
+ "epoch": 0.23458445040214476,
+ "grad_norm": 8.75,
+ "learning_rate": 3.828753351206434e-05,
+ "loss": 6.6301,
+ "step": 700
+ },
+ {
+ "epoch": 0.25134048257372654,
+ "grad_norm": 12.9375,
+ "learning_rate": 3.744973190348526e-05,
+ "loss": 6.5768,
+ "step": 750
+ },
+ {
+ "epoch": 0.2680965147453083,
+ "grad_norm": 7.8125,
+ "learning_rate": 3.6611930294906165e-05,
+ "loss": 6.5991,
+ "step": 800
+ },
+ {
+ "epoch": 0.2848525469168901,
+ "grad_norm": 9.75,
+ "learning_rate": 3.577412868632708e-05,
+ "loss": 6.5431,
+ "step": 850
+ },
+ {
+ "epoch": 0.30160857908847183,
+ "grad_norm": 9.125,
+ "learning_rate": 3.4936327077747996e-05,
+ "loss": 6.5432,
+ "step": 900
+ },
+ {
+ "epoch": 0.3183646112600536,
+ "grad_norm": 7.25,
+ "learning_rate": 3.40985254691689e-05,
+ "loss": 6.5763,
+ "step": 950
+ },
+ {
+ "epoch": 0.3351206434316354,
+ "grad_norm": 11.125,
+ "learning_rate": 3.326072386058981e-05,
+ "loss": 6.4979,
+ "step": 1000
+ },
+ {
+ "epoch": 0.35187667560321717,
+ "grad_norm": 10.625,
+ "learning_rate": 3.2422922252010724e-05,
+ "loss": 6.4356,
+ "step": 1050
+ },
+ {
+ "epoch": 0.36863270777479895,
+ "grad_norm": 10.5,
+ "learning_rate": 3.1585120643431636e-05,
+ "loss": 6.4282,
+ "step": 1100
+ },
+ {
+ "epoch": 0.3853887399463807,
+ "grad_norm": 11.625,
+ "learning_rate": 3.074731903485255e-05,
+ "loss": 6.513,
+ "step": 1150
+ },
+ {
+ "epoch": 0.40214477211796246,
+ "grad_norm": 8.875,
+ "learning_rate": 2.990951742627346e-05,
+ "loss": 6.4606,
+ "step": 1200
+ },
+ {
+ "epoch": 0.41890080428954424,
+ "grad_norm": 10.25,
+ "learning_rate": 2.907171581769437e-05,
+ "loss": 6.4058,
+ "step": 1250
+ },
+ {
+ "epoch": 0.435656836461126,
+ "grad_norm": 10.25,
+ "learning_rate": 2.823391420911528e-05,
+ "loss": 6.3676,
+ "step": 1300
+ },
+ {
+ "epoch": 0.4524128686327078,
+ "grad_norm": 7.34375,
+ "learning_rate": 2.7396112600536195e-05,
+ "loss": 6.383,
+ "step": 1350
+ },
+ {
+ "epoch": 0.4691689008042895,
+ "grad_norm": 8.875,
+ "learning_rate": 2.6558310991957107e-05,
+ "loss": 6.3729,
+ "step": 1400
+ },
+ {
+ "epoch": 0.4859249329758713,
+ "grad_norm": 11.125,
+ "learning_rate": 2.5720509383378015e-05,
+ "loss": 6.341,
+ "step": 1450
+ },
+ {
+ "epoch": 0.5026809651474531,
+ "grad_norm": 10.3125,
+ "learning_rate": 2.488270777479893e-05,
+ "loss": 6.3014,
+ "step": 1500
+ },
+ {
+ "epoch": 0.5194369973190348,
+ "grad_norm": 21.625,
+ "learning_rate": 2.4044906166219842e-05,
+ "loss": 6.3591,
+ "step": 1550
+ },
+ {
+ "epoch": 0.5361930294906166,
+ "grad_norm": 7.0,
+ "learning_rate": 2.320710455764075e-05,
+ "loss": 6.3019,
+ "step": 1600
+ },
+ {
+ "epoch": 0.5529490616621984,
+ "grad_norm": 6.84375,
+ "learning_rate": 2.2369302949061662e-05,
+ "loss": 6.2788,
+ "step": 1650
+ },
+ {
+ "epoch": 0.5697050938337802,
+ "grad_norm": 8.25,
+ "learning_rate": 2.1531501340482574e-05,
+ "loss": 6.2423,
+ "step": 1700
+ },
+ {
+ "epoch": 0.5864611260053619,
+ "grad_norm": 12.1875,
+ "learning_rate": 2.069369973190349e-05,
+ "loss": 6.2861,
+ "step": 1750
+ },
+ {
+ "epoch": 0.6032171581769437,
+ "grad_norm": 8.3125,
+ "learning_rate": 1.9855898123324398e-05,
+ "loss": 6.2618,
+ "step": 1800
+ },
+ {
+ "epoch": 0.6199731903485255,
+ "grad_norm": 10.9375,
+ "learning_rate": 1.901809651474531e-05,
+ "loss": 6.301,
+ "step": 1850
+ },
+ {
+ "epoch": 0.6367292225201072,
+ "grad_norm": 10.75,
+ "learning_rate": 1.818029490616622e-05,
+ "loss": 6.2324,
+ "step": 1900
+ },
+ {
+ "epoch": 0.653485254691689,
+ "grad_norm": 8.875,
+ "learning_rate": 1.7342493297587133e-05,
+ "loss": 6.2882,
+ "step": 1950
+ },
+ {
+ "epoch": 0.6702412868632708,
+ "grad_norm": 12.5625,
+ "learning_rate": 1.6504691689008045e-05,
+ "loss": 6.2651,
+ "step": 2000
+ },
+ {
+ "epoch": 0.6869973190348525,
+ "grad_norm": 8.0,
+ "learning_rate": 1.5666890080428956e-05,
+ "loss": 6.2333,
+ "step": 2050
+ },
+ {
+ "epoch": 0.7037533512064343,
+ "grad_norm": 7.96875,
+ "learning_rate": 1.4829088471849867e-05,
+ "loss": 6.2272,
+ "step": 2100
+ },
+ {
+ "epoch": 0.7205093833780161,
+ "grad_norm": 12.5,
+ "learning_rate": 1.3991286863270778e-05,
+ "loss": 6.2392,
+ "step": 2150
+ },
+ {
+ "epoch": 0.7372654155495979,
+ "grad_norm": 12.375,
+ "learning_rate": 1.3153485254691688e-05,
+ "loss": 6.306,
+ "step": 2200
+ },
+ {
+ "epoch": 0.7540214477211796,
+ "grad_norm": 12.125,
+ "learning_rate": 1.23156836461126e-05,
+ "loss": 6.305,
+ "step": 2250
+ },
+ {
+ "epoch": 0.7707774798927614,
+ "grad_norm": 8.875,
+ "learning_rate": 1.1477882037533512e-05,
+ "loss": 6.2792,
+ "step": 2300
+ },
+ {
+ "epoch": 0.7875335120643432,
+ "grad_norm": 11.3125,
+ "learning_rate": 1.0640080428954424e-05,
+ "loss": 6.1801,
+ "step": 2350
+ },
+ {
+ "epoch": 0.8042895442359249,
+ "grad_norm": 7.4375,
+ "learning_rate": 9.802278820375336e-06,
+ "loss": 6.3133,
+ "step": 2400
+ },
+ {
+ "epoch": 0.8210455764075067,
+ "grad_norm": 10.9375,
+ "learning_rate": 8.964477211796247e-06,
+ "loss": 6.2437,
+ "step": 2450
+ },
+ {
+ "epoch": 0.8378016085790885,
+ "grad_norm": 7.09375,
+ "learning_rate": 8.126675603217159e-06,
+ "loss": 6.266,
+ "step": 2500
+ },
+ {
+ "epoch": 0.8545576407506702,
+ "grad_norm": 10.625,
+ "learning_rate": 7.288873994638071e-06,
+ "loss": 6.2586,
+ "step": 2550
+ },
+ {
+ "epoch": 0.871313672922252,
+ "grad_norm": 10.125,
+ "learning_rate": 6.451072386058982e-06,
+ "loss": 6.1621,
+ "step": 2600
+ },
+ {
+ "epoch": 0.8880697050938338,
+ "grad_norm": 8.875,
+ "learning_rate": 5.613270777479894e-06,
+ "loss": 6.1984,
+ "step": 2650
+ },
+ {
+ "epoch": 0.9048257372654156,
+ "grad_norm": 8.375,
+ "learning_rate": 4.7754691689008045e-06,
+ "loss": 6.1918,
+ "step": 2700
+ },
+ {
+ "epoch": 0.9215817694369973,
+ "grad_norm": 11.0625,
+ "learning_rate": 3.9376675603217155e-06,
+ "loss": 6.2566,
+ "step": 2750
+ },
+ {
+ "epoch": 0.938337801608579,
+ "grad_norm": 10.8125,
+ "learning_rate": 3.0998659517426277e-06,
+ "loss": 6.2238,
+ "step": 2800
+ },
+ {
+ "epoch": 0.9550938337801609,
+ "grad_norm": 8.5,
+ "learning_rate": 2.262064343163539e-06,
+ "loss": 6.2632,
+ "step": 2850
+ },
+ {
+ "epoch": 0.9718498659517426,
+ "grad_norm": 8.625,
+ "learning_rate": 1.4242627345844506e-06,
+ "loss": 6.2279,
+ "step": 2900
+ },
+ {
+ "epoch": 0.9886058981233244,
+ "grad_norm": 26.0,
+ "learning_rate": 5.86461126005362e-07,
+ "loss": 6.197,
+ "step": 2950
+ }
+ ],
+ "logging_steps": 50,
+ "max_steps": 2984,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 5000,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.3213450259634176e+16,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
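As a quick sanity check on this run, the `log_history` above can be summarized directly from the JSON. A minimal sketch, assuming the checkpoint directory from this commit has been downloaded locally (only the standard-library `json` module is used):

```python
import json

# Path mirrors this commit's layout; adjust if you cloned elsewhere.
state_path = (
    "LLM-TTS/checkpoints/finetune_jonathan_llama_xcodec/"
    "checkpoint-2984/trainer_state.json"
)

with open(state_path) as f:
    state = json.load(f)

# One entry per logging event (every 50 steps here, per "logging_steps").
print(f"{'step':>6} {'loss':>8} {'lr':>12} {'grad_norm':>10}")
for e in state["log_history"]:
    print(f"{e['step']:>6} {e['loss']:>8.4f} "
          f"{e['learning_rate']:>12.2e} {e['grad_norm']:>10}")
```

Reading the log this way shows the loss falling from about 7.63 at step 50 to about 6.20 by step 2950 over the single training epoch.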
LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/config.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.53.2",
+ "use_cache": true,
+ "vocab_size": 193803
+ }
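This config matches a Llama-3.2-3B-class model (28 layers, hidden size 3072, llama3 RoPE scaling) except for the enlarged `vocab_size` of 193803, plausibly extended with XCodec audio tokens, though the diff itself does not say. A minimal sketch for loading the checkpoint with `transformers`, assuming the sharded `model-0000*-of-00002.safetensors` weight files (not shown in this text diff) sit alongside these JSON files:

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

ckpt = "LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984"

config = AutoConfig.from_pretrained(ckpt)
assert config.model_type == "llama" and config.vocab_size == 193803

# tie_word_embeddings is true, so the LM head shares the embed_tokens matrix.
model = AutoModelForCausalLM.from_pretrained(ckpt, torch_dtype=torch.bfloat16)
print(model.config.num_hidden_layers, model.config.hidden_size)  # 28 3072
```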
LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "transformers_version": "4.53.2"
+ }
LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/model.safetensors.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+ "metadata": {
+ "total_parameters": 3414110208,
+ "total_size": 6828220416
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
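The `weight_map` above is what `from_pretrained` consults to locate each tensor across the two shards (note layer 18 straddles the boundary: its layernorms and `mlp.down_proj` live in shard 2, the rest in shard 1). To pull a single tensor without loading the whole model, a minimal sketch, again assuming the shard files are present locally:

```python
import json
from safetensors import safe_open

ckpt = "LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984"

with open(f"{ckpt}/model.safetensors.index.json") as f:
    index = json.load(f)

# Resolve a tensor name to its shard file via the weight_map.
name = "model.layers.19.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # -> "model-00002-of-00002.safetensors"

with safe_open(f"{ckpt}/{shard}", framework="pt") as sf:
    tensor = sf.get_tensor(name)
print(name, tuple(tensor.shape), tensor.dtype)
```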
LLM-TTS/checkpoints/finetune_jronaldinho_llama_xcodec/checkpoint-2984/trainer_state.json ADDED
@@ -0,0 +1,447 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2984,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.01675603217158177,
+ "grad_norm": 16.25,
+ "learning_rate": 4.9178954423592495e-05,
+ "loss": 7.7369,
+ "step": 50
+ },
+ {
+ "epoch": 0.03351206434316354,
+ "grad_norm": 22.75,
+ "learning_rate": 4.834115281501341e-05,
+ "loss": 7.1522,
+ "step": 100
+ },
+ {
+ "epoch": 0.05026809651474531,
+ "grad_norm": 12.625,
+ "learning_rate": 4.750335120643432e-05,
+ "loss": 7.099,
+ "step": 150
+ },
+ {
+ "epoch": 0.06702412868632708,
+ "grad_norm": 8.5625,
+ "learning_rate": 4.666554959785523e-05,
+ "loss": 7.0765,
+ "step": 200
+ },
+ {
+ "epoch": 0.08378016085790885,
+ "grad_norm": 11.0,
+ "learning_rate": 4.582774798927614e-05,
+ "loss": 7.0373,
+ "step": 250
+ },
+ {
+ "epoch": 0.10053619302949061,
+ "grad_norm": 10.1875,
+ "learning_rate": 4.4989946380697054e-05,
+ "loss": 6.9745,
+ "step": 300
+ },
+ {
+ "epoch": 0.11729222520107238,
+ "grad_norm": 12.75,
+ "learning_rate": 4.4152144772117966e-05,
+ "loss": 6.9897,
+ "step": 350
+ },
+ {
+ "epoch": 0.13404825737265416,
+ "grad_norm": 9.625,
+ "learning_rate": 4.331434316353888e-05,
+ "loss": 6.8805,
+ "step": 400
+ },
+ {
+ "epoch": 0.15080428954423591,
+ "grad_norm": 18.375,
+ "learning_rate": 4.247654155495979e-05,
+ "loss": 6.8051,
+ "step": 450
+ },
+ {
+ "epoch": 0.1675603217158177,
+ "grad_norm": 17.0,
+ "learning_rate": 4.16387399463807e-05,
+ "loss": 6.8407,
+ "step": 500
+ },
+ {
+ "epoch": 0.18431635388739948,
+ "grad_norm": 9.25,
+ "learning_rate": 4.0800938337801606e-05,
+ "loss": 6.8142,
+ "step": 550
+ },
+ {
+ "epoch": 0.20107238605898123,
+ "grad_norm": 10.375,
+ "learning_rate": 3.9963136729222525e-05,
+ "loss": 6.7223,
+ "step": 600
+ },
+ {
+ "epoch": 0.217828418230563,
+ "grad_norm": 17.5,
+ "learning_rate": 3.912533512064344e-05,
+ "loss": 6.7469,
+ "step": 650
+ },
+ {
+ "epoch": 0.23458445040214476,
+ "grad_norm": 9.25,
+ "learning_rate": 3.828753351206434e-05,
+ "loss": 6.7671,
+ "step": 700
+ },
+ {
+ "epoch": 0.25134048257372654,
+ "grad_norm": 12.5,
+ "learning_rate": 3.744973190348526e-05,
+ "loss": 6.7353,
+ "step": 750
+ },
+ {
+ "epoch": 0.2680965147453083,
+ "grad_norm": 6.65625,
+ "learning_rate": 3.6611930294906165e-05,
+ "loss": 6.6989,
+ "step": 800
+ },
+ {
+ "epoch": 0.2848525469168901,
+ "grad_norm": 8.0625,
+ "learning_rate": 3.577412868632708e-05,
+ "loss": 6.6608,
+ "step": 850
+ },
+ {
+ "epoch": 0.30160857908847183,
+ "grad_norm": 8.9375,
+ "learning_rate": 3.4936327077747996e-05,
+ "loss": 6.6318,
+ "step": 900
+ },
+ {
+ "epoch": 0.3183646112600536,
+ "grad_norm": 6.8125,
+ "learning_rate": 3.40985254691689e-05,
+ "loss": 6.6803,
+ "step": 950
+ },
+ {
+ "epoch": 0.3351206434316354,
+ "grad_norm": 11.5625,
+ "learning_rate": 3.326072386058981e-05,
+ "loss": 6.6695,
+ "step": 1000
+ },
+ {
+ "epoch": 0.35187667560321717,
+ "grad_norm": 11.25,
+ "learning_rate": 3.2422922252010724e-05,
+ "loss": 6.5309,
+ "step": 1050
+ },
+ {
+ "epoch": 0.36863270777479895,
+ "grad_norm": 10.4375,
+ "learning_rate": 3.1585120643431636e-05,
+ "loss": 6.6139,
+ "step": 1100
+ },
+ {
+ "epoch": 0.3853887399463807,
+ "grad_norm": 10.9375,
+ "learning_rate": 3.074731903485255e-05,
+ "loss": 6.5815,
+ "step": 1150
+ },
+ {
+ "epoch": 0.40214477211796246,
+ "grad_norm": 8.125,
+ "learning_rate": 2.990951742627346e-05,
+ "loss": 6.5957,
+ "step": 1200
+ },
+ {
+ "epoch": 0.41890080428954424,
+ "grad_norm": 10.6875,
+ "learning_rate": 2.907171581769437e-05,
+ "loss": 6.5169,
+ "step": 1250
+ },
+ {
+ "epoch": 0.435656836461126,
+ "grad_norm": 8.8125,
+ "learning_rate": 2.823391420911528e-05,
+ "loss": 6.5024,
+ "step": 1300
+ },
+ {
+ "epoch": 0.4524128686327078,
+ "grad_norm": 7.59375,
+ "learning_rate": 2.7396112600536195e-05,
+ "loss": 6.5258,
+ "step": 1350
+ },
+ {
+ "epoch": 0.4691689008042895,
+ "grad_norm": 8.8125,
+ "learning_rate": 2.6558310991957107e-05,
+ "loss": 6.4758,
+ "step": 1400
+ },
+ {
+ "epoch": 0.4859249329758713,
+ "grad_norm": 11.125,
+ "learning_rate": 2.5720509383378015e-05,
+ "loss": 6.4974,
+ "step": 1450
+ },
+ {
+ "epoch": 0.5026809651474531,
+ "grad_norm": 10.4375,
+ "learning_rate": 2.488270777479893e-05,
+ "loss": 6.4562,
+ "step": 1500
+ },
+ {
+ "epoch": 0.5194369973190348,
+ "grad_norm": 21.0,
+ "learning_rate": 2.4044906166219842e-05,
+ "loss": 6.5143,
+ "step": 1550
+ },
+ {
+ "epoch": 0.5361930294906166,
+ "grad_norm": 6.375,
+ "learning_rate": 2.320710455764075e-05,
+ "loss": 6.3844,
+ "step": 1600
+ },
+ {
+ "epoch": 0.5529490616621984,
+ "grad_norm": 7.40625,
+ "learning_rate": 2.2369302949061662e-05,
+ "loss": 6.4705,
+ "step": 1650
+ },
+ {
+ "epoch": 0.5697050938337802,
+ "grad_norm": 8.875,
+ "learning_rate": 2.1531501340482574e-05,
+ "loss": 6.3831,
+ "step": 1700
+ },
+ {
+ "epoch": 0.5864611260053619,
+ "grad_norm": 11.6875,
+ "learning_rate": 2.069369973190349e-05,
+ "loss": 6.3881,
+ "step": 1750
+ },
+ {
+ "epoch": 0.6032171581769437,
+ "grad_norm": 8.8125,
+ "learning_rate": 1.9855898123324398e-05,
+ "loss": 6.4078,
+ "step": 1800
+ },
+ {
+ "epoch": 0.6199731903485255,
+ "grad_norm": 9.875,
+ "learning_rate": 1.901809651474531e-05,
+ "loss": 6.4398,
+ "step": 1850
+ },
+ {
+ "epoch": 0.6367292225201072,
+ "grad_norm": 10.625,
+ "learning_rate": 1.818029490616622e-05,
+ "loss": 6.3583,
+ "step": 1900
+ },
+ {
+ "epoch": 0.653485254691689,
+ "grad_norm": 8.8125,
+ "learning_rate": 1.7342493297587133e-05,
+ "loss": 6.4002,
+ "step": 1950
+ },
+ {
+ "epoch": 0.6702412868632708,
+ "grad_norm": 13.5625,
+ "learning_rate": 1.6504691689008045e-05,
+ "loss": 6.3811,
+ "step": 2000
+ },
+ {
+ "epoch": 0.6869973190348525,
+ "grad_norm": 7.5625,
+ "learning_rate": 1.5666890080428956e-05,
+ "loss": 6.3521,
+ "step": 2050
+ },
+ {
+ "epoch": 0.7037533512064343,
+ "grad_norm": 7.9375,
+ "learning_rate": 1.4829088471849867e-05,
+ "loss": 6.3412,
+ "step": 2100
+ },
+ {
+ "epoch": 0.7205093833780161,
+ "grad_norm": 12.4375,
+ "learning_rate": 1.3991286863270778e-05,
+ "loss": 6.3674,
+ "step": 2150
+ },
+ {
+ "epoch": 0.7372654155495979,
+ "grad_norm": 12.3125,
+ "learning_rate": 1.3153485254691688e-05,
+ "loss": 6.408,
+ "step": 2200
+ },
+ {
+ "epoch": 0.7540214477211796,
+ "grad_norm": 12.875,
+ "learning_rate": 1.23156836461126e-05,
+ "loss": 6.4123,
+ "step": 2250
+ },
+ {
+ "epoch": 0.7707774798927614,
+ "grad_norm": 8.375,
+ "learning_rate": 1.1477882037533512e-05,
+ "loss": 6.4316,
+ "step": 2300
+ },
+ {
+ "epoch": 0.7875335120643432,
+ "grad_norm": 11.0,
+ "learning_rate": 1.0640080428954424e-05,
+ "loss": 6.3405,
+ "step": 2350
+ },
+ {
+ "epoch": 0.8042895442359249,
+ "grad_norm": 6.65625,
+ "learning_rate": 9.802278820375336e-06,
+ "loss": 6.4144,
+ "step": 2400
+ },
+ {
+ "epoch": 0.8210455764075067,
+ "grad_norm": 11.0,
+ "learning_rate": 8.964477211796247e-06,
+ "loss": 6.3269,
+ "step": 2450
+ },
+ {
+ "epoch": 0.8378016085790885,
+ "grad_norm": 6.9375,
+ "learning_rate": 8.126675603217159e-06,
+ "loss": 6.3915,
+ "step": 2500
+ },
+ {
+ "epoch": 0.8545576407506702,
+ "grad_norm": 11.125,
+ "learning_rate": 7.288873994638071e-06,
+ "loss": 6.3686,
+ "step": 2550
+ },
+ {
+ "epoch": 0.871313672922252,
+ "grad_norm": 10.3125,
+ "learning_rate": 6.451072386058982e-06,
+ "loss": 6.3265,
+ "step": 2600
+ },
+ {
+ "epoch": 0.8880697050938338,
+ "grad_norm": 9.5,
+ "learning_rate": 5.613270777479894e-06,
+ "loss": 6.3386,
+ "step": 2650
+ },
+ {
+ "epoch": 0.9048257372654156,
+ "grad_norm": 7.875,
+ "learning_rate": 4.7754691689008045e-06,
+ "loss": 6.3062,
+ "step": 2700
+ },
+ {
+ "epoch": 0.9215817694369973,
+ "grad_norm": 11.3125,
+ "learning_rate": 3.9376675603217155e-06,
+ "loss": 6.4035,
+ "step": 2750
+ },
+ {
+ "epoch": 0.938337801608579,
+ "grad_norm": 10.875,
+ "learning_rate": 3.0998659517426277e-06,
+ "loss": 6.4041,
+ "step": 2800
+ },
+ {
+ "epoch": 0.9550938337801609,
+ "grad_norm": 8.3125,
+ "learning_rate": 2.262064343163539e-06,
+ "loss": 6.3914,
+ "step": 2850
+ },
+ {
+ "epoch": 0.9718498659517426,
+ "grad_norm": 8.3125,
+ "learning_rate": 1.4242627345844506e-06,
+ "loss": 6.4039,
+ "step": 2900
+ },
+ {
+ "epoch": 0.9886058981233244,
+ "grad_norm": 21.75,
+ "learning_rate": 5.86461126005362e-07,
+ "loss": 6.2662,
+ "step": 2950
+ }
+ ],
+ "logging_steps": 50,
+ "max_steps": 2984,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 5000,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.3213450259634176e+16,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/config.json ADDED
@@ -0,0 +1,39 @@
+ {
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.53.2",
+ "use_cache": true,
+ "vocab_size": 193803
+ }
LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "transformers_version": "4.53.2"
+ }
LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/model.safetensors.index.json ADDED
@@ -0,0 +1,262 @@
+ {
+ "metadata": {
+ "total_parameters": 3414110208,
+ "total_size": 6828220416
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
111
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
112
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
113
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
114
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
115
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
116
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
117
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
118
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
119
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
120
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
121
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
122
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
123
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
124
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
125
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
126
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
127
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
128
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
129
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
130
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
131
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
132
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
133
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
134
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
135
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
136
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
137
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
138
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
139
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
140
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
141
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
142
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
143
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
144
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
145
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
146
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
147
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
148
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
149
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
150
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
151
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
152
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
153
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
154
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
155
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
156
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
157
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
158
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
159
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
160
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
161
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
162
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
163
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
164
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
165
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
166
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
167
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
168
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
169
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
170
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
171
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
172
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
173
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
174
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
175
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
176
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
177
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
178
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
179
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
180
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
181
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
182
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
183
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
184
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
185
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
186
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
187
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
188
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
189
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
190
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
191
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
192
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
193
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
194
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
195
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
196
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
197
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
198
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
199
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
200
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
201
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
202
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
203
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
204
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
205
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
206
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
207
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
208
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
209
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
210
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
211
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
212
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
213
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
214
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
215
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
216
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
217
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
218
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
219
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
220
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
221
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
222
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
223
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
224
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
225
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
226
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
227
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
228
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
229
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
230
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
231
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
232
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
233
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
234
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
235
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
236
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
237
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
238
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
239
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
240
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
241
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
242
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
243
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
244
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
245
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
246
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
247
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
248
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
249
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
250
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
251
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
252
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
253
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
254
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
255
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
256
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
257
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
258
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
259
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
260
+ "model.norm.weight": "model-00002-of-00002.safetensors"
261
+ }
262
+ }
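The index above maps every tensor name to the shard that stores it; note that layer 18 straddles the shard boundary, with its attention projections and gate/up MLP weights in shard 1 but its layernorms and down projection in shard 2. A minimal sketch of resolving one tensor through `weight_map`, assuming the checkpoint directory has been downloaded locally (the path below is illustrative):

```python
import json

from safetensors import safe_open

# Illustrative local path; adjust to wherever the checkpoint was downloaded.
ckpt = "LLM-TTS/checkpoints/finetune_jonathan_llama_xcodec/checkpoint-2984"

with open(f"{ckpt}/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.18.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00002.safetensors"

# Open only the shard that holds the tensor and read it lazily.
with safe_open(f"{ckpt}/{shard}", framework="pt") as st:
    weight = st.get_tensor(name)
print(name, "->", shard, tuple(weight.shape))
```

Loaders such as transformers perform this resolution automatically; the index exists so a single tensor can be fetched without reading both shards.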
LLM-TTS/checkpoints/finetune_llama_xcodec/checkpoint-2984/trainer_state.json ADDED
@@ -0,0 +1,447 @@
+ {
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2984,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.01675603217158177,
+ "grad_norm": 18.25,
+ "learning_rate": 4.9178954423592495e-05,
+ "loss": 7.8384,
+ "step": 50
+ },
+ {
+ "epoch": 0.03351206434316354,
+ "grad_norm": 23.25,
+ "learning_rate": 4.834115281501341e-05,
+ "loss": 7.5834,
+ "step": 100
+ },
+ {
+ "epoch": 0.05026809651474531,
+ "grad_norm": 14.25,
+ "learning_rate": 4.750335120643432e-05,
+ "loss": 7.6394,
+ "step": 150
+ },
+ {
+ "epoch": 0.06702412868632708,
+ "grad_norm": 10.8125,
+ "learning_rate": 4.666554959785523e-05,
+ "loss": 7.6381,
+ "step": 200
+ },
+ {
+ "epoch": 0.08378016085790885,
+ "grad_norm": 14.75,
+ "learning_rate": 4.582774798927614e-05,
+ "loss": 7.5721,
+ "step": 250
+ },
+ {
+ "epoch": 0.10053619302949061,
+ "grad_norm": 11.125,
+ "learning_rate": 4.4989946380697054e-05,
+ "loss": 7.5896,
+ "step": 300
+ },
+ {
+ "epoch": 0.11729222520107238,
+ "grad_norm": 14.875,
+ "learning_rate": 4.4152144772117966e-05,
+ "loss": 7.53,
+ "step": 350
+ },
+ {
+ "epoch": 0.13404825737265416,
+ "grad_norm": 11.1875,
+ "learning_rate": 4.331434316353888e-05,
+ "loss": 7.5232,
+ "step": 400
+ },
+ {
+ "epoch": 0.15080428954423591,
+ "grad_norm": 24.625,
+ "learning_rate": 4.247654155495979e-05,
+ "loss": 7.3671,
+ "step": 450
+ },
+ {
+ "epoch": 0.1675603217158177,
+ "grad_norm": 17.25,
+ "learning_rate": 4.16387399463807e-05,
+ "loss": 7.4381,
+ "step": 500
+ },
+ {
+ "epoch": 0.18431635388739948,
+ "grad_norm": 11.0625,
+ "learning_rate": 4.0800938337801606e-05,
+ "loss": 7.3239,
+ "step": 550
+ },
+ {
+ "epoch": 0.20107238605898123,
+ "grad_norm": 12.6875,
+ "learning_rate": 3.9963136729222525e-05,
+ "loss": 7.3584,
+ "step": 600
+ },
+ {
+ "epoch": 0.217828418230563,
+ "grad_norm": 22.625,
+ "learning_rate": 3.912533512064344e-05,
+ "loss": 7.2773,
+ "step": 650
+ },
+ {
+ "epoch": 0.23458445040214476,
+ "grad_norm": 10.3125,
+ "learning_rate": 3.828753351206434e-05,
+ "loss": 7.3188,
+ "step": 700
+ },
+ {
+ "epoch": 0.25134048257372654,
+ "grad_norm": 13.375,
+ "learning_rate": 3.744973190348526e-05,
+ "loss": 7.2516,
+ "step": 750
+ },
+ {
+ "epoch": 0.2680965147453083,
+ "grad_norm": 9.8125,
+ "learning_rate": 3.6611930294906165e-05,
+ "loss": 7.2993,
+ "step": 800
+ },
+ {
+ "epoch": 0.2848525469168901,
+ "grad_norm": 10.4375,
+ "learning_rate": 3.577412868632708e-05,
+ "loss": 7.2054,
+ "step": 850
+ },
+ {
+ "epoch": 0.30160857908847183,
+ "grad_norm": 10.625,
+ "learning_rate": 3.4936327077747996e-05,
+ "loss": 7.2521,
+ "step": 900
+ },
+ {
+ "epoch": 0.3183646112600536,
+ "grad_norm": 9.125,
+ "learning_rate": 3.40985254691689e-05,
+ "loss": 7.2151,
+ "step": 950
+ },
+ {
+ "epoch": 0.3351206434316354,
+ "grad_norm": 12.9375,
+ "learning_rate": 3.326072386058981e-05,
+ "loss": 7.1426,
+ "step": 1000
+ },
+ {
+ "epoch": 0.35187667560321717,
+ "grad_norm": 10.9375,
+ "learning_rate": 3.2422922252010724e-05,
+ "loss": 7.0921,
+ "step": 1050
+ },
+ {
+ "epoch": 0.36863270777479895,
+ "grad_norm": 12.0625,
+ "learning_rate": 3.1585120643431636e-05,
+ "loss": 7.1262,
+ "step": 1100
+ },
+ {
+ "epoch": 0.3853887399463807,
+ "grad_norm": 13.3125,
+ "learning_rate": 3.074731903485255e-05,
+ "loss": 7.1519,
+ "step": 1150
+ },
+ {
+ "epoch": 0.40214477211796246,
+ "grad_norm": 8.8125,
+ "learning_rate": 2.990951742627346e-05,
+ "loss": 7.1543,
+ "step": 1200
+ },
+ {
+ "epoch": 0.41890080428954424,
+ "grad_norm": 11.0625,
+ "learning_rate": 2.907171581769437e-05,
+ "loss": 7.0298,
+ "step": 1250
+ },
+ {
+ "epoch": 0.435656836461126,
+ "grad_norm": 11.6875,
+ "learning_rate": 2.823391420911528e-05,
+ "loss": 7.0965,
+ "step": 1300
+ },
+ {
+ "epoch": 0.4524128686327078,
+ "grad_norm": 8.5,
+ "learning_rate": 2.7396112600536195e-05,
+ "loss": 7.044,
+ "step": 1350
+ },
+ {
+ "epoch": 0.4691689008042895,
+ "grad_norm": 12.0625,
+ "learning_rate": 2.6558310991957107e-05,
+ "loss": 6.9941,
+ "step": 1400
+ },
+ {
+ "epoch": 0.4859249329758713,
+ "grad_norm": 12.8125,
+ "learning_rate": 2.5720509383378015e-05,
+ "loss": 6.9948,
+ "step": 1450
+ },
+ {
+ "epoch": 0.5026809651474531,
+ "grad_norm": 11.875,
+ "learning_rate": 2.488270777479893e-05,
+ "loss": 6.9597,
+ "step": 1500
+ },
+ {
+ "epoch": 0.5194369973190348,
+ "grad_norm": 23.625,
+ "learning_rate": 2.4044906166219842e-05,
+ "loss": 7.0647,
+ "step": 1550
+ },
+ {
+ "epoch": 0.5361930294906166,
+ "grad_norm": 7.28125,
+ "learning_rate": 2.320710455764075e-05,
+ "loss": 6.921,
+ "step": 1600
+ },
+ {
+ "epoch": 0.5529490616621984,
+ "grad_norm": 8.5,
+ "learning_rate": 2.2369302949061662e-05,
+ "loss": 6.9261,
+ "step": 1650
+ },
+ {
+ "epoch": 0.5697050938337802,
+ "grad_norm": 9.25,
+ "learning_rate": 2.1531501340482574e-05,
+ "loss": 6.9047,
+ "step": 1700
+ },
+ {
+ "epoch": 0.5864611260053619,
+ "grad_norm": 13.9375,
+ "learning_rate": 2.069369973190349e-05,
+ "loss": 6.9577,
+ "step": 1750
+ },
+ {
+ "epoch": 0.6032171581769437,
+ "grad_norm": 9.1875,
+ "learning_rate": 1.9855898123324398e-05,
+ "loss": 6.9322,
+ "step": 1800
+ },
+ {
+ "epoch": 0.6199731903485255,
+ "grad_norm": 11.6875,
+ "learning_rate": 1.901809651474531e-05,
+ "loss": 6.9707,
+ "step": 1850
+ },
+ {
+ "epoch": 0.6367292225201072,
+ "grad_norm": 12.3125,
+ "learning_rate": 1.818029490616622e-05,
+ "loss": 6.8986,
+ "step": 1900
+ },
+ {
+ "epoch": 0.653485254691689,
+ "grad_norm": 10.375,
+ "learning_rate": 1.7342493297587133e-05,
+ "loss": 6.9319,
+ "step": 1950
+ },
+ {
+ "epoch": 0.6702412868632708,
+ "grad_norm": 14.8125,
+ "learning_rate": 1.6504691689008045e-05,
+ "loss": 6.9134,
+ "step": 2000
+ },
+ {
+ "epoch": 0.6869973190348525,
+ "grad_norm": 11.0,
+ "learning_rate": 1.5666890080428956e-05,
+ "loss": 6.9034,
+ "step": 2050
+ },
+ {
+ "epoch": 0.7037533512064343,
+ "grad_norm": 9.8125,
+ "learning_rate": 1.4829088471849867e-05,
+ "loss": 6.8781,
+ "step": 2100
+ },
+ {
+ "epoch": 0.7205093833780161,
+ "grad_norm": 16.875,
+ "learning_rate": 1.3991286863270778e-05,
+ "loss": 6.8682,
+ "step": 2150
+ },
+ {
+ "epoch": 0.7372654155495979,
+ "grad_norm": 14.9375,
+ "learning_rate": 1.3153485254691688e-05,
+ "loss": 6.9692,
+ "step": 2200
+ },
+ {
+ "epoch": 0.7540214477211796,
+ "grad_norm": 15.9375,
+ "learning_rate": 1.23156836461126e-05,
+ "loss": 6.9615,
+ "step": 2250
+ },
+ {
+ "epoch": 0.7707774798927614,
+ "grad_norm": 9.5,
+ "learning_rate": 1.1477882037533512e-05,
+ "loss": 6.937,
+ "step": 2300
+ },
+ {
+ "epoch": 0.7875335120643432,
+ "grad_norm": 13.25,
+ "learning_rate": 1.0640080428954424e-05,
+ "loss": 6.8293,
+ "step": 2350
+ },
+ {
+ "epoch": 0.8042895442359249,
+ "grad_norm": 7.15625,
+ "learning_rate": 9.802278820375336e-06,
+ "loss": 6.9232,
+ "step": 2400
+ },
+ {
+ "epoch": 0.8210455764075067,
+ "grad_norm": 12.0625,
+ "learning_rate": 8.964477211796247e-06,
+ "loss": 6.8923,
+ "step": 2450
+ },
+ {
+ "epoch": 0.8378016085790885,
+ "grad_norm": 8.25,
+ "learning_rate": 8.126675603217159e-06,
+ "loss": 6.8789,
+ "step": 2500
+ },
+ {
+ "epoch": 0.8545576407506702,
+ "grad_norm": 12.4375,
+ "learning_rate": 7.288873994638071e-06,
+ "loss": 6.9085,
+ "step": 2550
+ },
+ {
+ "epoch": 0.871313672922252,
+ "grad_norm": 11.9375,
+ "learning_rate": 6.451072386058982e-06,
+ "loss": 6.8567,
+ "step": 2600
+ },
+ {
+ "epoch": 0.8880697050938338,
+ "grad_norm": 10.375,
+ "learning_rate": 5.613270777479894e-06,
+ "loss": 6.9094,
+ "step": 2650
+ },
+ {
+ "epoch": 0.9048257372654156,
+ "grad_norm": 9.5,
+ "learning_rate": 4.7754691689008045e-06,
+ "loss": 6.8366,
+ "step": 2700
+ },
+ {
+ "epoch": 0.9215817694369973,
+ "grad_norm": 14.375,
+ "learning_rate": 3.9376675603217155e-06,
+ "loss": 6.9258,
+ "step": 2750
+ },
+ {
+ "epoch": 0.938337801608579,
+ "grad_norm": 12.1875,
+ "learning_rate": 3.0998659517426277e-06,
+ "loss": 6.8706,
+ "step": 2800
+ },
+ {
+ "epoch": 0.9550938337801609,
+ "grad_norm": 9.75,
+ "learning_rate": 2.262064343163539e-06,
+ "loss": 6.9028,
+ "step": 2850
+ },
+ {
+ "epoch": 0.9718498659517426,
+ "grad_norm": 9.8125,
+ "learning_rate": 1.4242627345844506e-06,
+ "loss": 6.9018,
+ "step": 2900
+ },
+ {
+ "epoch": 0.9886058981233244,
+ "grad_norm": 28.0,
+ "learning_rate": 5.86461126005362e-07,
+ "loss": 6.8597,
+ "step": 2950
+ }
+ ],
+ "logging_steps": 50,
+ "max_steps": 2984,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 5000,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.32276060090368e+16,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
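The state above logs every 50 optimizer steps across a single epoch of 2984 steps; training loss falls from 7.84 at step 50 to roughly 6.86 by step 2950 under a linearly decaying learning rate. A short sketch for pulling that curve out of `log_history` (the path is illustrative):

```python
import json

# Illustrative path to a downloaded checkpoint directory.
with open("checkpoint-2984/trainer_state.json") as f:
    state = json.load(f)

# Each record carries epoch, grad_norm, learning_rate, loss, and step.
for rec in state["log_history"]:
    print(f"step {rec['step']:>4}  loss {rec['loss']:.4f}  "
          f"lr {rec['learning_rate']:.2e}  grad_norm {rec['grad_norm']}")
```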
LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/config.json ADDED
@@ -0,0 +1,40 @@
+ {
+ "_name_or_path": "torilab/llama-xcodec-en",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 24,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 32.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": true,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.45.2",
+ "use_cache": true,
+ "vocab_size": 193803
+ }
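This config matches the geometry of Llama 3.2 3B (28 layers, hidden size 3072, 24 attention heads over 8 KV heads, llama3-style rope scaling), with the vocabulary enlarged to 193803 entries, presumably to hold XCodec audio tokens alongside the text vocabulary. A sketch of loading the checkpoint through the standard transformers API, assuming a local copy (the path is illustrative):

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Illustrative local path to one of the checkpoint directories in this commit.
ckpt = "LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984"

config = AutoConfig.from_pretrained(ckpt)
assert config.model_type == "llama" and config.vocab_size == 193803

# torch_dtype=torch.bfloat16 matches the dtype the weights were saved in.
model = AutoModelForCausalLM.from_pretrained(ckpt, torch_dtype=torch.bfloat16)
```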
LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "transformers_version": "4.45.2"
+ }
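`GenerationConfig.from_pretrained` reads this file at generation time, and decoding stops on any of the three eos ids. A minimal check, reusing the illustrative `ckpt` path from the sketch above:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(ckpt)  # reads generation_config.json
print(gen_cfg.bos_token_id)  # 128000
print(gen_cfg.eos_token_id)  # [128001, 128008, 128009]
```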
LLM-TTS/checkpoints/finetune_llama_xcodec_jonathan/checkpoint-2984/model.safetensors.index.json ADDED
@@ -0,0 +1,261 @@
+ {
+ "metadata": {
+ "total_size": 6828220416
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
LLM-TTS/jonathan_ft_data/2288.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2314.wav ADDED
Binary file (29.8 kB)
LLM-TTS/jonathan_ft_data/2316.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2379.wav ADDED
Binary file (71.2 kB)
LLM-TTS/jonathan_ft_data/24.wav ADDED
Binary file (45.1 kB)
LLM-TTS/jonathan_ft_data/2410.wav ADDED
Binary file (67.7 kB)
LLM-TTS/jonathan_ft_data/2445.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/2452.wav ADDED
Binary file (86.6 kB)
LLM-TTS/jonathan_ft_data/2464.wav ADDED
Binary file (45.1 kB)
LLM-TTS/jonathan_ft_data/2497.wav ADDED
Binary file (56.4 kB)
LLM-TTS/jonathan_ft_data/2537.wav ADDED
Binary file (37.5 kB)
LLM-TTS/jonathan_ft_data/254.wav ADDED
Binary file (78.9 kB)
LLM-TTS/jonathan_ft_data/2569.wav ADDED
Binary file (52.3 kB)
LLM-TTS/jonathan_ft_data/258.wav ADDED
Binary file (29.8 kB)
LLM-TTS/jonathan_ft_data/2602.wav ADDED
Binary file (52.3 kB)
LLM-TTS/jonathan_ft_data/2624.wav ADDED
Binary file (41 kB)
LLM-TTS/jonathan_ft_data/264.wav ADDED
Binary file (82.5 kB)
LLM-TTS/jonathan_ft_data/2648.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2659.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/2682.wav ADDED
Binary file (45.1 kB)
LLM-TTS/jonathan_ft_data/2704.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/2706.wav ADDED
Binary file (52.3 kB)
LLM-TTS/jonathan_ft_data/2715.wav ADDED
Binary file (71.2 kB)
LLM-TTS/jonathan_ft_data/2720.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/2725.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/2772.wav ADDED
Binary file (29.8 kB)
LLM-TTS/jonathan_ft_data/2787.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2793.wav ADDED
Binary file (56.4 kB)
LLM-TTS/jonathan_ft_data/2827.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2870.wav ADDED
Binary file (97.9 kB)
LLM-TTS/jonathan_ft_data/2889.wav ADDED
Binary file (37.5 kB)
LLM-TTS/jonathan_ft_data/2909.wav ADDED
Binary file (48.7 kB)
LLM-TTS/jonathan_ft_data/2911.wav ADDED
Binary file (63.6 kB)
LLM-TTS/jonathan_ft_data/2939.wav ADDED
Binary file (60 kB)
LLM-TTS/jonathan_ft_data/317.wav ADDED
Binary file (63.6 kB)
LLM-TTS/jonathan_ft_data/323.wav ADDED
Binary file (93.8 kB)
LLM-TTS/jonathan_ft_data/336.wav ADDED
Binary file (33.9 kB)
LLM-TTS/jonathan_ft_data/342.wav ADDED
Binary file (78.9 kB)