Update modules/ChatTTS/ChatTTS/core.py
modules/ChatTTS/ChatTTS/core.py
CHANGED
@@ -7,6 +7,9 @@ from huggingface_hub import snapshot_download
 from omegaconf import OmegaConf
 from vocos import Vocos
 
+from transformers.models.bert.tokenization_bert_fast import BertTokenizerFast
+import torch.serialization
+
 from .infer.api import infer_code, refine_text
 from .model.dvae import DVAE
 from .model.gpt import GPT_warpper
@@ -170,6 +173,7 @@ class Chat:
         self.logger.log(logging.INFO, "decoder loaded.")
 
         if tokenizer_path:
+            torch.serialization.add_safe_globals({'transformers.models.bert.tokenization_bert_fast.BertTokenizerFast': BertTokenizerFast})
             tokenizer = torch.load(tokenizer_path, map_location=map_location)
             tokenizer.padding_side = "left"
             self.pretrain_models["tokenizer"] = tokenizer
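The change imports BertTokenizerFast and torch.serialization, then registers the tokenizer class as a safe global before torch.load(tokenizer_path, ...). This presumably keeps the pickled tokenizer loadable under PyTorch's weights_only unpickler, which refuses to reconstruct arbitrary classes unless they are allow-listed (weights_only=True is the default since PyTorch 2.6). Below is a minimal sketch of that allow-listing pattern, assuming PyTorch 2.4 or newer; it uses the documented form of add_safe_globals, which takes a list of classes rather than the dict used in the diff, and the "tokenizer.pt" path is a placeholder rather than a file from this repository.

    import torch
    import torch.serialization
    from transformers.models.bert.tokenization_bert_fast import BertTokenizerFast

    # Allow-list the tokenizer class so the weights_only unpickler may rebuild it.
    # Documented signature (PyTorch >= 2.4): add_safe_globals(list_of_callables).
    torch.serialization.add_safe_globals([BertTokenizerFast])

    # Load a pickled BertTokenizerFast object; "tokenizer.pt" is a placeholder path.
    # Depending on what the checkpoint pickles, further classes may also need to
    # be allow-listed before the load succeeds.
    tokenizer = torch.load("tokenizer.pt", map_location="cpu")
    tokenizer.padding_side = "left"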