Update modeling_lora.py
Browse files — modeling_lora.py: +7 −0
modeling_lora.py
CHANGED
|
@@ -12,6 +12,13 @@ from torch.nn import functional as F
|
|
| 12 |
from transformers import PretrainedConfig
|
| 13 |
|
| 14 |
from .rotary import RotaryEmbedding
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
from .modeling_xlm_roberta import (XLMRobertaFlashConfig, XLMRobertaModel,
|
| 16 |
XLMRobertaPreTrainedModel)
|
| 17 |
|
|
|
|
| 12 |
from transformers import PretrainedConfig
|
| 13 |
|
| 14 |
from .rotary import RotaryEmbedding
|
| 15 |
+
from .mlp import FusedMLP, Mlp
|
| 16 |
+
from .xlm_padding import index_first_axis_residual, pad_input, unpad_input
|
| 17 |
+
from .stochastic_depth import stochastic_depth
|
| 18 |
+
from .mha import MHA
|
| 19 |
+
from .block import Block
|
| 20 |
+
from .configuration_xlm_roberta import XLMRobertaFlashConfig
|
| 21 |
+
from .embedding import XLMRobertaEmbeddings
|
| 22 |
from .modeling_xlm_roberta import (XLMRobertaFlashConfig, XLMRobertaModel,
|
| 23 |
XLMRobertaPreTrainedModel)
|
| 24 |
|