diff --git a/modelscope/trainers/hooks/logger/text_logger_hook.py b/modelscope/trainers/hooks/logger/text_logger_hook.py
index 8552ab4e..95644783 100644
--- a/modelscope/trainers/hooks/logger/text_logger_hook.py
+++ b/modelscope/trainers/hooks/logger/text_logger_hook.py
@@ -61,7 +61,7 @@ class TextLoggerHook(LoggerHook):
         self.json_log_path = osp.join(self.out_dir,
                                       '{}.log.json'.format(trainer.timestamp))
         if hasattr(trainer, 'meta') and trainer.meta is not None:
-            self._dump_log(trainer.meta, trainer)
+            self._dump_log(trainer.meta)

     def _get_max_memory(self, trainer):
         device = getattr(trainer.model, 'output_device', None)
diff --git a/modelscope/trainers/trainer.py b/modelscope/trainers/trainer.py
index e1fd7522..aaf24cfa 100644
--- a/modelscope/trainers/trainer.py
+++ b/modelscope/trainers/trainer.py
@@ -183,7 +183,7 @@ class EpochBasedTrainer(BaseTrainer):
                 preprocessor=self.eval_preprocessor,
                 **kwargs)

-        self.train_data_collator, self.eval_default_collate = None, None
+        self.train_data_collator, self.eval_data_collator = None, None
         if isinstance(data_collator, Mapping):
             if not (ConfigKeys.train in data_collator
                     or ConfigKeys.val in data_collator):