Skip to content

Commit 165b4dc

Browse files
committed
fix: Fix typo in Llama3VisionAlphaChatHandler. Closes abetlen#1488
1 parent 91d05ab commit 165b4dc

File tree

1 file changed

+5
-2
lines changed

llama_cpp/llama_chat_format.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -3098,7 +3098,7 @@ class NanoLlavaChatHandler(Llava15ChatHandler):
30983098
"{% endif %}"
30993099
)
31003100

3101-
class Llama3VisionAlpha(Llava15ChatHandler):
3101+
class Llama3VisionAlphaChatHandler(Llava15ChatHandler):
31023102
# question = "<image>" + q
31033103

31043104
# prompt = f"<|start_header_id|>user<|end_header_id|>\n\n{question}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
@@ -3159,6 +3159,10 @@ class Llama3VisionAlpha(Llava15ChatHandler):
31593159
"{% endif %}"
31603160
)
31613161

3162+
# alias
3163+
Llama3VisionAlpha = Llama3VisionAlphaChatHandler
3164+
3165+
31623166
@register_chat_completion_handler("chatml-function-calling")
31633167
def chatml_function_calling(
31643168
llama: llama.Llama,
@@ -3193,7 +3197,6 @@ def chatml_function_calling(
31933197
llama_types.CreateChatCompletionResponse,
31943198
Iterator[llama_types.CreateChatCompletionStreamResponse],
31953199
]:
3196-
print(logprobs)
31973200
function_calling_template = (
31983201
"{% for message in messages %}"
31993202
"<|im_start|>{{ message.role }}\n"

0 commit comments

Comments
 (0)