Prechádzať zdrojové kódy

Fix inference error

Lengyue 2 rokov pred
rodič
commit
45b8ed0bb0
1 zmenil súbory, kde vykonal 4 pridanie a 1 odobranie
    tools/llama/generate.py (+4 −1)

+ 4 - 1
tools/llama/generate.py

@@ -19,7 +19,10 @@ from fish_speech.text.parser import clean_text
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 torch._inductor.config.coordinate_descent_tuning = True
 torch._inductor.config.triton.unique_kernel_names = True
-torch._inductor.config.fx_graph_cache = True  # Experimental feature to reduce compilation times, will be on by default in future
+
+if hasattr(torch._inductor.config, "fx_graph_cache"):
+    # Experimental feature to reduce compilation times, will be on by default in future
+    torch._inductor.config.fx_graph_cache = True
 
 
 from fish_speech.models.text2semantic.llama import Transformer