PYTHONPATH=. torchrun examples/pytorch/stable_diffusion/finetune_stable_diffusion.py \
    --model 'damo/multi-modal_efficient-diffusion-tuning-lora' \
    --work_dir './tmp/stable_diffusion_tuning' \
    --train_dataset_namespace 'damo' \
    --train_dataset_name 'buptwq/lora-stable-diffusion-finetune-dog' \
    --max_epochs 150 \
    --save_ckpt_strategy 'by_epoch' \
    --logging_interval 100 \
    --train.dataloader.workers_per_gpu 0 \
    --evaluation.dataloader.workers_per_gpu 0 \
    --train.optimizer.lr 1e-4 \
    --use_model_config true
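If more than one GPU is available on the machine, the same entry point can be launched through torchrun's standard `--nproc_per_node` flag. The sketch below is illustrative only: the process count of 2 is an assumption about your hardware, and every other argument is kept exactly as in the command above.

# Illustrative sketch, not part of the original instructions:
# launch the same fine-tuning job on 2 GPUs of a single node.
# --nproc_per_node is a standard torchrun flag; "2" is an assumed GPU count.
PYTHONPATH=. torchrun --nproc_per_node 2 \
    examples/pytorch/stable_diffusion/finetune_stable_diffusion.py \
    --model 'damo/multi-modal_efficient-diffusion-tuning-lora' \
    --work_dir './tmp/stable_diffusion_tuning' \
    --train_dataset_namespace 'damo' \
    --train_dataset_name 'buptwq/lora-stable-diffusion-finetune-dog' \
    --max_epochs 150 \
    --save_ckpt_strategy 'by_epoch' \
    --logging_interval 100 \
    --train.dataloader.workers_per_gpu 0 \
    --evaluation.dataloader.workers_per_gpu 0 \
    --train.optimizer.lr 1e-4 \
    --use_model_config true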