diff --git a/infer-web.py b/infer-web.py
index b3eac89..faa15ed 100644
--- a/infer-web.py
+++ b/infer-web.py
@@ -943,21 +943,24 @@ def change_f0(if_f0_3, sr2, version19):  # f0method8,pretrained_G14,pretrained_D
         else "",
     )
 
 
+global log_interval
+
 def set_log_interval(exp_dir, batch_size12):
     log_interval = 1
-
+
     folder_path = os.path.join(exp_dir, "1_16k_wavs")
-
+
     if os.path.exists(folder_path) and os.path.isdir(folder_path):
-        wav_files = [f for f in os.listdir(folder_path) if f.endswith('.wav')]
+        wav_files = [f for f in os.listdir(folder_path) if f.endswith(".wav")]
         if wav_files:
             sample_size = len(wav_files)
             log_interval = math.ceil(sample_size / batch_size12)
-
+
     return log_interval
 
+
 # but3.click(click_train,[exp_dir1,sr2,if_f0_3,save_epoch10,total_epoch11,batch_size12,if_save_latest13,pretrained_G14,pretrained_D15,gpus16])
 
 def click_train(
     exp_dir1,
diff --git a/train/utils.py b/train/utils.py
index 3fcf392..783f251 100644
--- a/train/utils.py
+++ b/train/utils.py
@@ -353,11 +353,7 @@ def get_hparams(init=True):
         help="if caching the dataset in GPU memory, 1 or 0",
     )
     parser.add_argument(
-        "-li",
-        "--log_interval",
-        type=int,
-        required=True,
-        help="log interval"
+        "-li", "--log_interval", type=int, required=True, help="log interval"
     )
 
     args = parser.parse_args()
@@ -402,10 +398,10 @@ def get_hparams(init=True):
     hparams.train.log_interval = args.log_interval
 
     # Update log_interval in the 'train' section of the config dictionary
-    config['train']['log_interval'] = args.log_interval
+    config["train"]["log_interval"] = args.log_interval
 
     # Save the updated config back to the config_save_path
-    with open(config_save_path, 'w') as f:
+    with open(config_save_path, "w") as f:
         json.dump(config, f, indent=4)
 
     return hparams