@@ -19,7 +19,10 @@ from fish_speech.text.parser import clean_text
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 torch._inductor.config.coordinate_descent_tuning = True
 torch._inductor.config.triton.unique_kernel_names = True
-torch._inductor.config.fx_graph_cache = True # Experimental feature to reduce compilation times, will be on by default in future
+
+if hasattr(torch._inductor.config, "fx_graph_cache"):
+    # Experimental feature to reduce compilation times, will be on by default in future
+    torch._inductor.config.fx_graph_cache = True


 from fish_speech.models.text2semantic.llama import Transformer