Mirror of https://github.com/modelscope/modelscope.git — synced 2025-12-21 10:39:24 +01:00.
Based on the feat/0131/nlp_args branch; original code review: https://code.alibaba-inc.com/Ali-MaaS/MaaS-lib/codereview/11408570. Adds support for running finetuning from the command line with training arguments, compatible with the configuration optimization.
13 lines · 512 B · Bash
# Fine-tune the StructBERT base backbone for sentence classification on the
# CLUE/TNEWS subset (15 news-category labels), driven entirely by CLI args.
# Dotted flags (e.g. --train.optimizer.lr) override nested configuration keys.
# workers_per_gpu is set to 0 to disable dataloader worker subprocesses —
# presumably for debuggability/portability; confirm before changing.
# NOTE(review): the scraped original interleaved a bare '|' gutter line after
# every backslash continuation, which is a bash syntax error (a '\' followed
# by a pipe into nothing); each continuation now follows its '\' directly.
PYTHONPATH=. python examples/pytorch/text_classification/finetune_text_classification.py \
  --model 'damo/nlp_structbert_backbone_base_std' \
  --dataset_name 'clue' \
  --subset_name 'tnews' \
  --first_sequence 'sentence' \
  --preprocessor.label label \
  --model.num_labels 15 \
  --labels '0,1,2,3,4,5,6,7,8,9,10,11,12,13,14' \
  --preprocessor 'sen-cls-tokenizer' \
  --train.dataloader.workers_per_gpu 0 \
  --evaluation.dataloader.workers_per_gpu 0 \
  --train.optimizer.lr 1e-5 \