Lazily import Trainer to avoid a potential error.
ChengZi committed to main, 2 years ago

2 changed files with 2 additions and 2 deletions:
- train_clm_with_hf_trainer.py
- train_mlm_with_hf_trainer.py
train_clm_with_hf_trainer.py:

@@ -16,7 +16,6 @@ import evaluate
 import transformers
 from transformers import (
     MODEL_FOR_CAUSAL_LM_MAPPING,
-    Trainer,
     TrainingArguments,
     default_data_collator,
     is_torch_tpu_available,
@@ -123,6 +122,7 @@ def train_clm_with_hf_trainer(model,
                               data_args,
                               training_args,
                               **kwargs):
+    from transformers import Trainer
     print('train clm with hugging face transformers trainer')

     data_args = dataclass_from_dict(DataTrainingArguments, data_args)
train_mlm_with_hf_trainer.py:

@@ -17,7 +17,6 @@ import transformers
 from transformers import (
     MODEL_FOR_MASKED_LM_MAPPING,
     DataCollatorForLanguageModeling,
-    Trainer,
     TrainingArguments,
     is_torch_tpu_available,
     set_seed,
@@ -131,6 +130,7 @@ def train_mlm_with_hf_trainer(model,
                               data_args,
                               training_args,
                               **kwargs):
+    from transformers import Trainer
     print('train mlm with hugging face transformers trainer')

     data_args = dataclass_from_dict(DataTrainingArguments, data_args)
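The change is the same in both files: Trainer is dropped from the module-level "from transformers import (...)" block and imported inside the training function instead, so the import only runs (and can only fail) when training is actually invoked rather than when the module is first loaded. A minimal sketch of this lazy-import pattern is shown below; all names other than transformers.Trainer are illustrative, not taken from the repository.

# Sketch of the lazy-import pattern used in this commit.
# `train_with_hf_trainer` and its parameters are hypothetical names for
# illustration; only `transformers.Trainer` comes from the diff.

def train_with_hf_trainer(model, tokenizer, training_args, train_dataset):
    # Deferring the import to call time keeps module import cheap and
    # avoids raising at import time if Trainer (or one of its optional
    # backends) cannot be loaded in the current environment.
    from transformers import Trainer

    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        tokenizer=tokenizer,
    )
    trainer.train()
    return trainer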