We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 5ab40e6 · commit 9d053d6 — Copy full SHA for 9d053d6
llama_cpp/llama.py
@@ -1016,6 +1016,10 @@ def _create_completion(
1016
)
1017
model_name: str = model if model is not None else self.model_path
1018
1019
+ # User or template may have added an unwanted extra BOS
1020
+ if prompt_tokens[:2] == [self.token_bos()] * 2:
1021
+ del prompt_tokens[0]
1022
+
1023
# NOTE: This likely doesn't work correctly for the first token in the prompt
1024
# because of the extra space added to the start of the prompt_tokens
1025
if logit_bias is not None:
0 commit comments