fix cpu+fp16 inference issue

fix cpu+fp16 inference issue
This commit is contained in:
RVC-Boss
2024-08-01 21:47:30 +08:00
committed by GitHub
parent 9498fc775b
commit bce451a2d1
3 changed files with 8 additions and 6 deletions

View File

@@ -10,13 +10,13 @@ all_parts = os.environ.get("all_parts")
# --- Runtime configuration pulled from environment variables set by the parent launcher ---
# NOTE(review): presumably `_CUDA_VISIBLE_DEVICES` is always set by the launcher;
# if it is missing, os.environ.get returns None and this assignment raises — confirm.
os.environ["CUDA_VISIBLE_DEVICES"] = os.environ.get("_CUDA_VISIBLE_DEVICES")
opt_dir = os.environ.get("opt_dir")
bert_pretrained_dir = os.environ.get("bert_pretrained_dir")
import torch

# Half precision is only usable on CUDA; force fp32 when running on CPU
# (this is the cpu+fp16 inference fix). The flag arrives as a string, so use an
# explicit comparison instead of eval() on environment input — eval on external
# strings is an injection risk and unnecessary here. "True"/"1" enable fp16.
is_half = os.environ.get("is_half", "True").lower() in ("1", "true") and torch.cuda.is_available()

import sys, numpy as np, traceback, pdb
import os.path
from glob import glob
from tqdm import tqdm
from text.cleaner import clean_text
from transformers import AutoModelForMaskedLM, AutoTokenizer