Version Check (#1390)
* version check
* fix webui and symbols
* fix v1 language map
@@ -1,13 +1,17 @@
 from text import japanese, cleaned_text_to_sequence, english,korean,cantonese
 import os
-if os.environ.get("version","v1")=="v1":
-    from text import chinese
-    from text.symbols import symbols
-else:
-    from text import chinese2 as chinese
-    from text.symbols2 import symbols
+# if os.environ.get("version","v1")=="v1":
+#     from text import chinese
+#     from text.symbols import symbols
+# else:
+#     from text import chinese2 as chinese
+#     from text.symbols2 import symbols
+
+from text import symbols as symbols_v1
+from text import symbols2 as symbols_v2
+from text import chinese as chinese_v1
+from text import chinese2 as chinese_v2
 
-language_module_map = {"zh": chinese, "ja": japanese, "en": english, "ko": korean,"yue":cantonese}
 special = [
     # ("%", "zh", "SP"),
     ("¥", "zh", "SP2"),
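The hunk above drops the import-time environment check and instead imports both the v1 and v2 symbol tables and Chinese front ends side by side, so the version can be picked per call. A minimal sketch of that selection pattern, assuming the text.symbols and text.symbols2 modules each expose a symbols list as this diff suggests:

    from text import symbols as symbols_v1
    from text import symbols2 as symbols_v2

    def select_symbols(version):
        # Hypothetical helper, not part of this commit: pick the symbol
        # table at call time instead of at import time.
        return symbols_v1.symbols if version == "v1" else symbols_v2.symbols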
@@ -16,13 +20,20 @@ special = [
 ]
 
 
-def clean_text(text, language):
+def clean_text(text, language, version):
+    if version == "v1":
+        symbols = symbols_v1.symbols
+        language_module_map = {"zh": chinese_v1, "ja": japanese, "en": english}
+    else:
+        symbols = symbols_v2.symbols
+        language_module_map = {"zh": chinese_v2, "ja": japanese, "en": english, "ko": korean,"yue":cantonese}
+
     if(language not in language_module_map):
         language="en"
         text=" "
     for special_s, special_l, target_symbol in special:
         if special_s in text and language == special_l:
-            return clean_special(text, language, special_s, target_symbol)
+            return clean_special(text, language, special_s, target_symbol, version)
     language_module = language_module_map[language]
     if hasattr(language_module,"text_normalize"):
         norm_text = language_module.text_normalize(text)
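With the new version argument, each clean_text call chooses its own symbol set and language map: "v1" resolves only zh, ja and en, while any other value uses the v2 map that also covers ko and yue. A hedged usage sketch (the module path and the sample texts are assumptions, not taken from this commit):

    from text.cleaner import clean_text  # import path assumed from the repository layout

    # v2 path: Korean and Cantonese are only present in the v2 language map.
    phones, word2ph, norm_text = clean_text("你好，世界。", "zh", "v2")

    # v1 path: falls back to the original zh/ja/en map and the v1 symbols.
    phones_v1, _, _ = clean_text("Hello there.", "en", "v1")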
@@ -42,11 +53,18 @@ def clean_text(text, language):
         word2ph = None
 
-    for ph in phones:
-        assert ph in symbols
+    phones = ['UNK' if ph not in symbols else ph for ph in phones]
     return phones, word2ph, norm_text
 
 
-def clean_special(text, language, special_s, target_symbol):
+def clean_special(text, language, special_s, target_symbol, version):
+    if version == "v1":
+        symbols = symbols_v1.symbols
+        language_module_map = {"zh": chinese_v1, "ja": japanese, "en": english}
+    else:
+        symbols = symbols_v2.symbols
+        language_module_map = {"zh": chinese_v2, "ja": japanese, "en": english, "ko": korean,"yue":cantonese}
+
     """
     特殊静音段sp符号处理
     """
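The last hunk also replaces the hard assert on unknown phonemes with a substitution, so out-of-vocabulary phones are mapped to 'UNK' instead of aborting the cleaning run. A toy illustration of that new behavior (the symbol list below is made up for the example, not the real table):

    symbols = ["a", "b", "UNK"]   # toy symbol table for illustration only
    phones = ["a", "x", "b"]      # "x" is not in the table

    # New behavior: unknown phonemes become 'UNK' instead of raising AssertionError.
    phones = ["UNK" if ph not in symbols else ph for ph in phones]
    print(phones)                 # ['a', 'UNK', 'b']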