sqldatastorage to csv

alexlnkp
2023-07-29 00:23:52 +07:00
parent ddd80826f1
commit 8b1ff46921
6 changed files with 231 additions and 142 deletions

View File

View File

@@ -38,7 +38,7 @@ from lib.infer_pack.models import (
from lib.infer_pack.models_onnx import SynthesizerTrnMsNSFsidM
from infer_uvr5 import _audio_pre_, _audio_pre_new
from MDXNet import MDXNetDereverb
from my_utils import load_audio
from my_utils import load_audio, CSVutil
from train.process_ckpt import change_info, extract_small_model, merge, show_info
from vc_infer_pipeline import VC
from sklearn.cluster import MiniBatchKMeans
@@ -56,63 +56,26 @@ os.environ["TEMP"] = tmp
warnings.filterwarnings("ignore")
torch.manual_seed(114514)
import sqlite3
def clear_sql(signal, frame):
cursor.execute("DELETE FROM formant_data")
cursor.execute("DELETE FROM stop_train")
conn.commit()
conn.close()
print("Clearing SQL database...")
sys.exit(0)
if sys.platform == 'win32':
    signal.signal(signal.SIGBREAK, clear_sql)
signal.signal(signal.SIGINT, clear_sql)
signal.signal(signal.SIGTERM, clear_sql)
logging.getLogger("numba").setLevel(logging.WARNING)
conn = sqlite3.connect('TEMP/db:cachedb?mode=memory&cache=shared', check_same_thread=False)
cursor = conn.cursor()
import csv
cursor.execute("""
CREATE TABLE IF NOT EXISTS formant_data (
Quefrency FLOAT,
Timbre FLOAT,
DoFormant INTEGER
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS stop_train (
stop BOOL
)
""")
if not os.path.isdir('csvdb/'):
    os.makedirs('csvdb')
    frmnt, stp = open("csvdb/formanting.csv", 'w'), open("csvdb/stop.csv", 'w')
    frmnt.close()
    stp.close()
global DoFormant, Quefrency, Timbre
try:
cursor.execute("SELECT Quefrency, Timbre, DoFormant FROM formant_data")
row = cursor.fetchone()
if row is not None:
Quefrency, Timbre, DoFormant = row
else:
raise ValueError("No data")
except (ValueError, TypeError):
Quefrency = 8.0
Timbre = 1.2
DoFormant = False
cursor.execute("DELETE FROM formant_data")
cursor.execute("DELETE FROM stop_train")
cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (Quefrency, Timbre, 0))
conn.commit()
DoFormant, Quefrency, Timbre = CSVutil('csvdb/formanting.csv', 'r', 'formanting')
DoFormant = (
lambda DoFormant: True if DoFormant.lower() == 'true' else (False if DoFormant.lower() == 'false' else DoFormant)
)(DoFormant)
except (ValueError, TypeError, IndexError):
DoFormant, Quefrency, Timbre = False, 1.0, 1.0
CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, Quefrency, Timbre)
config = Config()
i18n = I18nAuto()
@@ -653,9 +616,7 @@ def formant_enabled(cbox, qfrency, tmbre, frmntapply, formantpreset, formant_ref
    if (cbox):
        DoFormant = True
        cursor.execute("DELETE FROM formant_data")
        cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (qfrency, tmbre, 1))
        conn.commit()
        CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, qfrency, tmbre)
        #print(f"is checked? - {cbox}\ngot {DoFormant}")
@@ -672,9 +633,7 @@ def formant_enabled(cbox, qfrency, tmbre, frmntapply, formantpreset, formant_ref
    else:
        DoFormant = False
        cursor.execute("DELETE FROM formant_data")
        cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (qfrency, tmbre, int(DoFormant)))
        conn.commit()
        CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, qfrency, tmbre)
        #print(f"is checked? - {cbox}\ngot {DoFormant}")
        return (
@@ -692,9 +651,7 @@ def formant_apply(qfrency, tmbre):
    Quefrency = qfrency
    Timbre = tmbre
    DoFormant = True
    cursor.execute("DELETE FROM formant_data")
    cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (qfrency, tmbre, 1))
    conn.commit()
    CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, qfrency, tmbre)
    return ({"value": Quefrency, "__type__": "update"}, {"value": Timbre, "__type__": "update"})
@@ -993,8 +950,7 @@ def click_train(
    if_save_every_weights18,
    version19,
):
    cursor.execute("DELETE FROM stop_train")
    conn.commit()
    CSVutil('csvdb/stop.csv', 'w+', 'formanting', False)
    # Generate the filelist
exp_dir = "%s/logs/%s" % (now_dir, exp_dir1)
os.makedirs(exp_dir, exist_ok=True)
@@ -1576,18 +1532,14 @@ def cli_infer(com):
        DoFormant = False
        Quefrency = 0.0
        Timbre = 0.0
        cursor.execute("DELETE FROM formant_data")
        cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (Quefrency, Timbre, 0))
        conn.commit()
        CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, Quefrency, Timbre)
    else:
        DoFormant = True
        Quefrency = float(com[15])
        Timbre = float(com[16])
        cursor.execute("DELETE FROM formant_data")
        cursor.execute("INSERT INTO formant_data (Quefrency, Timbre, DoFormant) VALUES (?, ?, ?)", (Quefrency, Timbre, 1))
        conn.commit()
        CSVutil('csvdb/formanting.csv', 'w+', 'formanting', DoFormant, Quefrency, Timbre)
    print("Mangio-RVC-Fork Infer-CLI: Starting the inference...")
    vc_data = get_vc(model_name, protection_amnt, protect1)
    print(vc_data)
@@ -1945,8 +1897,7 @@ def match_index(sid0):
def stoptraining(mim):
    if int(mim) == 1:
        cursor.execute("INSERT INTO stop_train (stop) VALUES (?)", (True,))
        conn.commit()
        CSVutil('csvdb/stop.csv', 'w+', 'stop', 'True')
        #p.terminate()
        #p.kill()
        try:

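Taken together, the infer-web.py changes above swap the in-memory SQLite lookups for a single row in csvdb/formanting.csv: on startup the row is read through CSVutil, the stored 'True'/'False' string is coerced back to a bool, and a missing or unreadable file falls back to (False, 1.0, 1.0) and rewrites the CSV. A minimal self-contained sketch of that read-with-fallback pattern, using a hypothetical read_formant_state helper rather than the fork's actual module layout:

    import csv, os

    def read_formant_state(path="csvdb/formanting.csv"):
        # Hypothetical helper sketching the startup logic above; not part of the fork.
        try:
            with open(path, newline="") as f:
                row = next(csv.reader(f))  # CSVutil stores a single row of strings
            do_formant = row[0].strip().lower() == "true"
            return do_formant, float(row[1]), float(row[2])
        except (OSError, StopIteration, IndexError, ValueError):
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w", newline="") as f:
                csv.writer(f).writerow([False, 1.0, 1.0])  # same defaults the diff falls back to
            return False, 1.0, 1.0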
View File

@@ -8,7 +8,7 @@ import sys
import random
import sqlite3
import csv
platform_stft_mapping = {
    'linux': 'stftpitchshift',
@@ -19,12 +19,34 @@ platform_stft_mapping = {
stft = platform_stft_mapping.get(sys.platform)
# praatEXE = join('.',os.path.abspath(os.getcwd()) + r"\Praat.exe")
def CSVutil(file, rw, type, *args):
    if type == 'formanting':
        if rw == 'r':
            with open(file) as fileCSVread:
                csv_reader = list(csv.reader(fileCSVread))
                return (
                    csv_reader[0][0], csv_reader[0][1], csv_reader[0][2]
                ) if csv_reader is not None else (lambda: exec('raise ValueError("No data")'))()
        else:
            if args:
                doformnt = args[0]
            else:
                doformnt = False
            qfr = args[1] if len(args) > 1 else 1.0
            tmb = args[2] if len(args) > 2 else 1.0
            with open(file, rw, newline='') as fileCSVwrite:
                csv_writer = csv.writer(fileCSVwrite, delimiter=',')
                csv_writer.writerow([doformnt, qfr, tmb])
    elif type == 'stop':
        stop = args[0] if args else False
        with open(file, rw, newline='') as fileCSVwrite:
            csv_writer = csv.writer(fileCSVwrite, delimiter=',')
            csv_writer.writerow([stop])
def load_audio(file, sr, DoFormant, Quefrency, Timbre):
    converted = False
    DoFormant, Quefrency, Timbre = CSVutil('csvdb/formanting.csv', 'r', 'formanting')
    try:
        conn = sqlite3.connect('TEMP/db:cachedb?mode=memory&cache=shared', check_same_thread=False)
        cursor = conn.cursor()
        # https://github.com/openai/whisper/blob/main/whisper/audio.py#L26
        # This launches a subprocess to decode audio while down-mixing and resampling as necessary.
        # Requires the ffmpeg CLI and `ffmpeg-python` package to be installed.
@@ -32,10 +54,10 @@ def load_audio(file, sr, DoFormant, Quefrency, Timbre):
file.strip(" ").strip('"').strip("\n").strip('"').strip(" ")
) # 防止小白拷路径头尾带了空格和"和回车
file_formanted = file.strip(" ").strip('"').strip("\n").strip('"').strip(" ")
cursor.execute("SELECT Quefrency, Timbre, DoFormant FROM formant_data")
Quefrency, Timbre, DoFormant = cursor.fetchone()
#print(f"dofor={bool(DoFormant)} timbr={Timbre} quef={Quefrency}\n")
if bool(DoFormant):
if (lambda DoFormant: True if DoFormant.lower() == 'true' else (False if DoFormant.lower() == 'false' else DoFormant))(DoFormant):
numerator = round(random.uniform(1,4), 4)
# os.system(f"stftpitchshift -i {file} -q {Quefrency} -t {Timbre} -o {file_formanted}")
# print('stftpitchshift -i "%s" -p 1.0 --rms -w 128 -v 8 -q %s -t %s -o "%s"' % (file, Quefrency, Timbre, file_formanted))
@@ -89,7 +111,8 @@ def load_audio(file, sr, DoFormant, Quefrency, Timbre):
                )
            )
            try: os.remove("%sFORMANTED_%s.wav" % (file_formanted, str(numerator)))
            except Exception: pass; print("couldn't remove formanted type of file")
        else:
            out, _ = (
@@ -102,14 +125,9 @@ def load_audio(file, sr, DoFormant, Quefrency, Timbre):
    except Exception as e:
        raise RuntimeError(f"Failed to load audio: {e}")
    if DoFormant:
        try: os.remove("%sFORMANTED_%s.wav" % (file_formanted, str(numerator)))
        except Exception: pass; print("couldn't remove formanted type of file")
    if converted:
        try: os.remove(file_formanted)
        except Exception: pass; print("couldn't remove converted type of file")
        converted = False
    conn.close()
    return np.frombuffer(out, np.float32).flatten()

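CSVutil, as defined in this hunk, persists one row per file: [DoFormant, Quefrency, Timbre] for the 'formanting' type and a single flag for the 'stop' type, and only the 'formanting' type supports rw='r'; note that csv.writer stores every value as a string, which is why the callers re-parse 'True'/'False'. A short usage sketch against that definition (assuming the csvdb/ directory already exists and my_utils is importable from the working directory):

    from my_utils import CSVutil

    # Persist the current formant-shift settings (written out as strings by csv.writer).
    CSVutil('csvdb/formanting.csv', 'w+', 'formanting', True, 8.0, 1.2)

    # Read them back: the values return as the strings 'True', '8.0', '1.2'.
    do_formant, quefrency, timbre = CSVutil('csvdb/formanting.csv', 'r', 'formanting')

    # Raise and later clear the training stop flag.
    CSVutil('csvdb/stop.csv', 'w+', 'stop', 'True')
    CSVutil('csvdb/stop.csv', 'w+', 'stop', 'False')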
View File

@@ -1,48 +1,164 @@
joblib>=1.1.0
absl-py==1.4.0
aiofiles==23.1.0
aiohttp==3.8.3
aiosignal==1.3.1
altair==4.2.0
antlr4-python3-runtime==4.8
anyio==3.6.2
appdirs==1.4.4
asttokens==2.2.1
async-timeout==4.0.2
attrs==21.4.0
audioread==2.1.9
backcall==0.2.0
beautifulsoup4==4.12.2
bitarray==2.5.1
bs4==0.0.1
cachetools==5.3.0
certifi==2022.12.7
cffi==1.15.1
charset-normalizer==2.1.1
click==8.1.3
colorama==0.4.6
coloredlogs==15.0.1
contourpy==1.0.6
cycler==0.11.0
Cython==0.29.30
decorator==5.1.1
edge-tts==6.1.5
elevenlabs==0.2.21
entrypoints==0.4
executing==1.2.0
fairseq==0.12.2
faiss-cpu==1.7.2
fastapi==0.88.0
ffmpeg-python==0.2.0
ffmpy==0.0.3
filelock==3.10.0
flatbuffers==23.5.9
fonttools==4.38.0
frozenlist==1.3.3
fsspec==2022.11.0
future==0.18.2
google-auth==2.16.2
google-auth-oauthlib==1.0.0
gradio==3.34.0
gradio_client==0.2.10
grpcio==1.51.3
gspread==5.10.0
gTTS==2.3.2
h11==0.12.0
httpcore==0.15.0
httpx==0.23.0
huggingface-hub==0.16.4
humanfriendly==10.0
hydra-core==1.0.7
idna==3.4
importlib-metadata==6.0.0
importlib-resources==6.0.0
ipython==8.14.0
jedi==0.18.2
Jinja2==3.1.2
joblib==1.2.0
json5==0.9.14
jsonschema==4.17.3
kiwisolver==1.4.4
librosa==0.9.1
linkify-it-py==1.0.3
llvmlite==0.39.1
lxml==4.9.2
Markdown==3.4.3
markdown-it-py==2.2.0
MarkupSafe==2.1.1
matplotlib==3.6.2
matplotlib-inline==0.1.6
mdit-py-plugins==0.3.3
mdurl==0.1.1
mega.py==1.0.8
mpmath==1.2.1
multidict==6.0.3
networkx==2.8.8
noisereduce==2.0.1
numba==0.56.4
numpy==1.23.5
oauthlib==3.2.2
omegaconf==2.0.6
onnx==1.14.0
onnxconverter-common==1.13.0
onnxruntime-gpu==1.14.1
orjson==3.8.3
packaging==22.0
pandas==1.5.2
parso==0.8.3
pathlib==1.0.1
pickleshare==0.7.5
Pillow==9.3.0
pooch==1.6.0
portalocker==2.6.0
praat-parselmouth==0.4.2
praatio==6.0.1
prompt-toolkit==3.0.39
protobuf==4.22.1
pure-eval==0.2.2
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycparser==2.21
pycryptodome==3.16.0
pydantic==1.10.2
pydub==0.25.1
Pygments==2.15.1
pyparsing==3.0.9
pyreadline3==3.4.1
pyrsistent==0.19.2
PySimpleGUI==4.60.4
python-dateutil==2.8.2
python-multipart==0.0.5
pytz==2022.6
pywin32==305
pyworld==0.3.2
PyYAML==6.0
regex==2022.10.31
requests==2.28.1
requests-oauthlib==1.3.1
resampy==0.4.2
rfc3986==1.5.0
rich==13.3.5
rsa==4.9
sacrebleu==2.3.1
scikit-learn==1.2.0
scipy==1.9.3
librosa==0.9.1
llvmlite==0.39.0
fairseq==0.12.2
faiss-cpu==1.7.3
gradio==3.34.0
Cython
pydub>=0.25.1
soundfile>=0.12.1
ffmpeg-python>=0.2.0
tensorboardX
Jinja2>=3.1.2
json5
Markdown
matplotlib>=3.7.0
matplotlib-inline>=0.1.3
praat-parselmouth>=0.4.2
Pillow>=9.1.1
resampy>=0.4.2
scikit-learn
tensorboard
tensorboard-data-server
tensorboard-plugin-wit
torchgen>=0.0.1
torch==2.0.0
tqdm>=4.63.1
tornado>=6.1
Werkzeug>=2.2.3
uc-micro-py>=1.0.1
sympy>=1.11.1
tabulate>=0.8.10
PyYAML>=6.0
pyasn1>=0.4.8
pyasn1-modules>=0.2.8
fsspec>=2022.11.0
absl-py>=1.2.0
audioread
uvicorn>=0.21.1
colorama>=0.4.5
pyworld>=0.3.2
httpx==0.23.0
#onnxruntime-gpu
torchcrepe==0.0.20
fastapi==0.88
semantic-version==2.10.0
six==1.16.0
skl2onnx==1.14.1
sniffio==1.3.0
sounddevice==0.4.6
soundfile==0.11.0
soupsieve==2.4.1
stack-data==0.6.2
starlette==0.22.0
stftpitchshift==1.5.1
sympy==1.11.1
tabulate==0.9.0
tenacity==5.1.5
tensorboard==2.13.0
tensorboard-data-server==0.7.0
tensorboard-plugin-wit==1.8.1
tensorboardX==2.6.1
threadpoolctl==3.1.0
toolz==0.12.0
torch @ https://download.pytorch.org/whl/cu118/torch-2.0.0%2Bcu118-cp39-cp39-win_amd64.whl
torchaudio==2.0.1
torchcrepe==0.0.19
torchgen==0.0.1
tornado==6.3.2
tqdm==4.64.1
traitlets==5.9.0
typing_extensions==4.4.0
uc-micro-py==1.0.1
urllib3==1.26.13
uvicorn==0.20.0
wcwidth==0.2.6
websockets==10.4
Werkzeug==2.2.3
yarl==1.8.2
zipp==3.15.0

View File

@@ -33,7 +33,7 @@ from data_utils import (
    DistributedBucketSampler,
)
import sqlite3
import csv
if hps.version == "v1":
    from lib.infer_pack.models import (
@@ -354,9 +354,6 @@ def train_and_evaluate(
    # Run steps
    epoch_recorder = EpochRecorder()
    conn = sqlite3.connect('TEMP/db:cachedb?mode=memory&cache=shared', check_same_thread=False)
    cursor = conn.cursor()
    for batch_idx, info in data_iterator:
        # Data
        ## Unpack
@@ -576,8 +573,15 @@ def train_and_evaluate(
                )
            )
    cursor.execute("SELECT stop FROM stop_train LIMIT 1")
    if bool(cursor.fetchone()) == True:
    try:
        with open('csvdb/stop.csv') as CSVStop:
            csv_reader = list(csv.reader(CSVStop))
            stopbtn = csv_reader[0][0] if csv_reader is not None else (lambda: exec('raise ValueError("No data")'))()
        stopbtn = (lambda stopbtn: True if stopbtn.lower() == 'true' else (False if stopbtn.lower() == 'false' else stopbtn))(stopbtn)
    except (ValueError, TypeError, IndexError):
        stopbtn = False
    if stopbtn:
        logger.info("Stop Button was pressed. The program is closed.")
        if hasattr(net_g, "module"):
            ckpt = net_g.module.state_dict()
@@ -598,9 +602,9 @@ def train_and_evaluate(
            )
        )
        sleep(1)
        cursor.execute("DELETE FROM stop_train")
        conn.commit()
        conn.close()
        with open('csvdb/stop.csv', 'w+', newline='') as STOPCSVwrite:
            csv_writer = csv.writer(STOPCSVwrite, delimiter=',')
            csv_writer.writerow(['False'])
        os._exit(2333333)
    if rank == 0:
@@ -621,9 +625,9 @@ def train_and_evaluate(
            )
        )
        sleep(1)
        cursor.execute("DELETE FROM stop_train")
        conn.commit()
        conn.close()
        with open('csvdb/stop.csv', 'w+', newline='') as STOPCSVwrite:
            csv_writer = csv.writer(STOPCSVwrite, delimiter=',')
            csv_writer.writerow(['False'])
        os._exit(2333333)

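In the trainer, the SELECT on stop_train becomes a poll of csvdb/stop.csv: the first cell is read, the string 'True' signals a stop request, anything unreadable or empty counts as "keep training", and the file is rewritten to 'False' before the process exits so the next run starts clean. A compact sketch of that polling logic, with hypothetical should_stop/clear_stop helpers standing in for the inlined code above:

    import csv

    def should_stop(path='csvdb/stop.csv'):
        # Hypothetical helper: True only when the first CSV cell literally reads 'True'.
        try:
            with open(path, newline='') as f:
                return next(csv.reader(f))[0].strip().lower() == 'true'
        except (OSError, StopIteration, IndexError):
            return False  # missing or empty file: keep training

    def clear_stop(path='csvdb/stop.csv'):
        # Same reset the diff performs right before os._exit.
        with open(path, 'w', newline='') as f:
            csv.writer(f).writerow(['False'])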
View File

@@ -18,8 +18,8 @@ from my_utils import load_audio
import tqdm
DoFormant = False
Quefrency = 0.0
Timbre = 0.0
Quefrency = 1.0
Timbre = 1.0
mutex = multiprocessing.Lock()
f = open("%s/preprocess.log" % exp_dir, "a+")
@@ -45,7 +45,7 @@ class PreProcess:
        )
        self.sr = sr
        self.bh, self.ah = signal.butter(N=5, Wn=48, btype="high", fs=self.sr)
        self.per = 3.0
        self.per = 3.7
        self.overlap = 0.3
        self.tail = self.per + self.overlap
        self.max = 0.9