Fixed bug regarding merging of checkpoints

Fixed bug regarding merging of checkpoints
This commit is contained in:
kalomaze
2023-07-28 18:38:52 -05:00
committed by GitHub

View File

@@ -250,7 +250,7 @@ def merge(path1, path2, alpha1, sr, f0, info, name, version):
elif(sr=="32k"):opt["config"] = [513, 32, 192, 192, 768, 2, 6, 3, 0, "1", [3, 7, 11], [[1, 3, 5], [1, 3, 5], [1, 3, 5]], [10, 4, 2, 2, 2], 512, [16, 16, 4, 4,4], 109, 256, 32000]
"""
opt["sr"] = sr
-opt["f0"] = 1 if f0 == i18n("是") else 0
+opt["f0"] = 1 if f0 else 0
opt["version"] = version
opt["info"] = info
torch.save(opt, "weights/%s.pth" % name)