Add flash attention option: GPT_SoVITS/AR/models/t2s_lightning_module.py
Add flash attention option: GPT_SoVITS/AR/models/t2s_model.py
Add flash attention option: GPT_SoVITS/TTS_infer_pack/TTS.py
Add flash attention option: GPT_SoVITS/TTS_infer_pack/TextPreprocessor.py
Add flash attention option: GPT_SoVITS/configs/tts_infer.yaml
Add flash attention option: GPT_SoVITS/inference_webui.py
GPT_SoVITS/configs/tts_infer.yaml
@@ -2,6 +2,7 @@ custom:
   bert_base_path: GPT_SoVITS/pretrained_models/chinese-roberta-wwm-ext-large
   cnhuhbert_base_path: GPT_SoVITS/pretrained_models/chinese-hubert-base
   device: cuda
+  flash_attn_enabled: true
   is_half: true
   t2s_weights_path: GPT_SoVITS/pretrained_models/s1bert25hz-2kh-longer-epoch=68e-step=50232.ckpt
   vits_weights_path: GPT_SoVITS/pretrained_models/s2G488k.pth
@@ -9,6 +10,7 @@ default:
   bert_base_path: GPT_SoVITS/pretrained_models/chinese-roberta-wwm-ext-large
   cnhuhbert_base_path: GPT_SoVITS/pretrained_models/chinese-hubert-base
   device: cpu
+  flash_attn_enabled: true
   is_half: false
   t2s_weights_path: GPT_SoVITS/pretrained_models/s1bert25hz-2kh-longer-epoch=68e-step=50232.ckpt
   vits_weights_path: GPT_SoVITS/pretrained_models/s2G488k.pth
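For reference, a minimal sketch of how the new flash_attn_enabled key might be read from tts_infer.yaml on the inference side. The load_tts_infer_config helper below is hypothetical and not the actual loader in GPT_SoVITS/TTS_infer_pack/TTS.py; it only assumes PyYAML and the config layout shown in the diff above.

import yaml

def load_tts_infer_config(path="GPT_SoVITS/configs/tts_infer.yaml", profile="custom"):
    # Hypothetical helper: read tts_infer.yaml and pick the "custom" or "default" block.
    with open(path, "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)[profile]
    # Older configs that predate this commit lack the key; default it to True.
    cfg.setdefault("flash_attn_enabled", True)
    return cfg

cfg = load_tts_infer_config()
# The flag would then be forwarded when constructing the text-to-semantic model
# (t2s_lightning_module.py / t2s_model.py), alongside device and is_half.
print(cfg["device"], cfg["is_half"], cfg["flash_attn_enabled"])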