diff --git a/mlx_vlm/chat_ui.py b/mlx_vlm/chat_ui.py
index aa3c5a1..3c89fa6 100644
--- a/mlx_vlm/chat_ui.py
+++ b/mlx_vlm/chat_ui.py
@@ -96,12 +96,22 @@ def chat(message, history, temperature, max_tokens):
     if message["files"]:
         chat.append(get_message_json(config["model_type"], message["text"]))
-
-    messages = processor.apply_chat_template(
-        chat,
-        tokenize=False,
-        add_generation_prompt=True,
-    )
+    else:
+        raise Exception("Please upload an image. Text only chat is not supported.")
+
+    if "chat_template" in processor.__dict__.keys():
+        messages = processor.apply_chat_template(
+            chat,
+            tokenize=False,
+            add_generation_prompt=True,
+        )
+
+    elif "tokenizer" in processor.__dict__.keys():
+        messages = processor.tokenizer.apply_chat_template(
+            chat,
+            tokenize=False,
+            add_generation_prompt=True,
+        )
 
     response = ""
     for chunk in generate(
         model,
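For reference, a minimal standalone sketch of the template dispatch this diff introduces: some processors expose apply_chat_template directly, while others only carry a chat template on their wrapped tokenizer. The render_prompt helper name is hypothetical; the apply_chat_template(..., tokenize=False, add_generation_prompt=True) calls mirror the Hugging Face API the diff relies on.

# Hypothetical helper illustrating the branch added in chat_ui.py.
# `processor` is assumed to be a Hugging Face processor/tokenizer object
# as loaded by mlx_vlm; which attribute it carries varies by model family.
def render_prompt(processor, chat):
    if "chat_template" in processor.__dict__:
        # Multimodal processors that ship their own chat template.
        return processor.apply_chat_template(
            chat, tokenize=False, add_generation_prompt=True
        )
    if "tokenizer" in processor.__dict__:
        # Fall back to the template on the inner tokenizer.
        return processor.tokenizer.apply_chat_template(
            chat, tokenize=False, add_generation_prompt=True
        )
    raise ValueError("processor exposes neither chat_template nor a tokenizer")

Checking processor.__dict__ (rather than hasattr) deliberately ignores attributes inherited from the class, matching the diff's behavior; dropping .keys() is equivalent, since membership tests on a dict already check its keys.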