Add files via upload
37
GPT_SoVITS/AR/utils/__init__.py
Normal file
@@ -0,0 +1,37 @@
import re


def str2bool(str):
    return True if str.lower() == 'true' else False


def get_newest_ckpt(string_list):
    # Regex pattern matching the epoch and step numbers in a checkpoint filename
    pattern = r'epoch=(\d+)-step=(\d+)\.ckpt'

    # Extract the numbers from each filename and build a list of (epoch, step, name) tuples
    extracted_info = []
    for string in string_list:
        match = re.match(pattern, string)
        if match:
            epoch = int(match.group(1))
            step = int(match.group(2))
            extracted_info.append((epoch, step, string))
    # Sort by the epoch number and then the step number, newest first
    sorted_info = sorted(
        extracted_info, key=lambda x: (x[0], x[1]), reverse=True)
    # Take the newest ckpt filename
    newest_ckpt = sorted_info[0][2]
    return newest_ckpt


# Return the file's first line when it exists and is non-empty, otherwise False
def check_txt_file(file_path):
    try:
        with open(file_path, 'r') as file:
            text = file.readline().strip()
        assert text.strip() != ''
        return text
    except Exception:
        return False
    return False
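A minimal usage sketch for these helpers, assuming the package is importable as GPT_SoVITS.AR.utils and that a logs directory holds checkpoints named like epoch=4-step=1000.ckpt (the paths and filenames below are illustrative, not part of this commit):

import os
from GPT_SoVITS.AR.utils import str2bool, get_newest_ckpt, check_txt_file

# Pick the most recent checkpoint by the (epoch, step) pair parsed from each filename.
ckpt_names = [f for f in os.listdir("logs/ckpt") if f.endswith(".ckpt")]
latest = get_newest_ckpt(ckpt_names)            # e.g. 'epoch=9-step=4500.ckpt'

# Parse a CLI-style boolean flag.
resume = str2bool("True")                       # -> True

# Returns the file's first line, or False if the file is missing or empty.
prompt_text = check_txt_file("logs/prompt.txt")
if prompt_text:
    print("prompt:", prompt_text)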
38
GPT_SoVITS/AR/utils/initialize.py
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""Initialize modules for espnet2 neural networks."""
import torch
from typeguard import check_argument_types


def initialize(model: torch.nn.Module, init: str):
    """Initialize weights of a neural network module.

    Parameters are initialized using the given method or distribution.

    Custom initialization routines can be implemented into submodules
    as function `espnet_initialization_fn` within the custom module.

    Args:
        model: Target.
        init: Method of initialization.
    """
    assert check_argument_types()
    print("init with", init)

    # weight init
    for p in model.parameters():
        if p.dim() > 1:
            if init == "xavier_uniform":
                torch.nn.init.xavier_uniform_(p.data)
            elif init == "xavier_normal":
                torch.nn.init.xavier_normal_(p.data)
            elif init == "kaiming_uniform":
                torch.nn.init.kaiming_uniform_(p.data, nonlinearity="relu")
            elif init == "kaiming_normal":
                torch.nn.init.kaiming_normal_(p.data, nonlinearity="relu")
            else:
                raise ValueError("Unknown initialization: " + init)
    # bias init
    for name, p in model.named_parameters():
        if ".bias" in name and p.dim() == 1:
            p.data.zero_()
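A short sketch of how initialize might be applied; the toy model and the chosen scheme are assumptions for illustration only:

import torch
from GPT_SoVITS.AR.utils.initialize import initialize

# Toy model: weight matrices (dim > 1) receive the chosen scheme,
# while the second loop in initialize() zeroes every bias vector.
model = torch.nn.Sequential(
    torch.nn.Linear(80, 256),
    torch.nn.ReLU(),
    torch.nn.Linear(256, 80),
)

initialize(model, "xavier_uniform")   # also: xavier_normal, kaiming_uniform, kaiming_normal
assert torch.all(model[0].bias == 0)  # biases have been reset to zero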
32
GPT_SoVITS/AR/utils/io.py
Normal file
@@ -0,0 +1,32 @@
import sys

import torch
import yaml


def load_yaml_config(path):
    with open(path) as f:
        config = yaml.full_load(f)
    return config


def save_config_to_yaml(config, path):
    assert path.endswith('.yaml')
    with open(path, 'w') as f:
        f.write(yaml.dump(config))
        f.close()


def write_args(args, path):
    args_dict = dict((name, getattr(args, name)) for name in dir(args)
                     if not name.startswith('_'))
    with open(path, 'a') as args_file:
        args_file.write('==> torch version: {}\n'.format(torch.__version__))
        args_file.write(
            '==> cudnn version: {}\n'.format(torch.backends.cudnn.version()))
        args_file.write('==> Cmd:\n')
        args_file.write(str(sys.argv))
        args_file.write('\n==> args:\n')
        for k, v in sorted(args_dict.items()):
            args_file.write('  %s: %s\n' % (str(k), str(v)))
        args_file.close()
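A quick sketch of how these I/O helpers could be combined; the file names and the argparse flag are illustrative assumptions, not part of this commit:

import argparse
from GPT_SoVITS.AR.utils.io import load_yaml_config, save_config_to_yaml, write_args

# Round-trip a config dict through YAML.
config = {"train": {"batch_size": 8, "lr": 0.0001}}
save_config_to_yaml(config, "config.yaml")
assert load_yaml_config("config.yaml") == config

# Append the torch/cudnn versions, command line, and parsed arguments to a log file.
parser = argparse.ArgumentParser()
parser.add_argument("--exp_name", default="demo")
args = parser.parse_args([])
write_args(args, "args.log")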