Add `add_pooling_layer` arg to XLMRobertaLora
#50 opened by gmastrapas

modeling_lora.py (+5 -2)
```diff
@@ -250,11 +250,14 @@ class XLMRobertaLoRA(XLMRobertaPreTrainedModel):
     """
 
     def __init__(
-        self, config: XLMRobertaFlashConfig, roberta: Optional[XLMRobertaModel] = None
+        self,
+        config: XLMRobertaFlashConfig,
+        roberta: Optional[XLMRobertaModel] = None,
+        add_pooling_layer: bool = True,
     ):
         super().__init__(config)
         if roberta is None:
-            self.roberta = XLMRobertaModel(config)
+            self.roberta = XLMRobertaModel(config, add_pooling_layer=add_pooling_layer)
         else:
             self.roberta = roberta
 
```
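For context, a minimal usage sketch of the new argument; the import paths and the bare config constructor below are assumptions (only `XLMRobertaLoRA`, `XLMRobertaFlashConfig`, and the `add_pooling_layer` parameter appear in the diff):

```python
# Usage sketch: import paths are assumed, not confirmed by the PR;
# only the class names and the new add_pooling_layer parameter
# come from the diff above.
from modeling_lora import XLMRobertaLoRA
from configuration_xlm_roberta import XLMRobertaFlashConfig  # assumed module

config = XLMRobertaFlashConfig()

# Default behaviour is unchanged: the pooling layer is built.
model = XLMRobertaLoRA(config)

# New in this PR: skip the pooler, e.g. when only token-level
# hidden states are needed and the pooler weights would go unused.
encoder_only = XLMRobertaLoRA(config, add_pooling_layer=False)
```

Because the flag is forwarded to `XLMRobertaModel` only when no pre-built `roberta` instance is passed, callers who supply their own `roberta` keep full control over whether it includes a pooler.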