Browse Source

Fix prompt encoding

Lengyue 2 years ago
parent
commit
ed46e8396c
1 changed file with 1 addition and 1 deletion
  1. +1 −1
      tools/llama/generate.py

+ 1 - 1
tools/llama/generate.py

@@ -525,7 +525,7 @@ def main(
         )
         print(f"Encoded text: {text}")
 
-    if use_prompt and iterative_prompt:
+    if use_prompt:
         encoded_prompt = encode_tokens(
             tokenizer,
             prompt_text,