From 0879aaae33643575e1212e73a8b38bd6f1c762c3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Fri, 19 Apr 2024 06:40:07 -0400
Subject: [PATCH] optional

---
 litgpt/prompts.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/litgpt/prompts.py b/litgpt/prompts.py
index bbc60e318d..04a0551cd1 100644
--- a/litgpt/prompts.py
+++ b/litgpt/prompts.py
@@ -202,9 +202,10 @@ def apply(self, prompt: str, **kwargs: str) -> str:
 class Llama3(PromptStyle):
     def apply(self, prompt: str, **kwargs: str) -> str:
+        # https://github.com/meta-llama/llama3/blob/359887376f0aaf30e433f23e25df858d8c2a9833/llama/tokenizer.py#L202-L229
         return (
             "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
-            "You are a helpful assistant.<|eot_id|>\n"
+            "You are a helpful assistant.<|eot_id|>\n"  # The system prompt is optional
             "<|start_header_id|>user<|end_header_id|>\n\n"
             f"{prompt}<|eot_id|>\n"
             "<|start_header_id|>assistant<|end_header_id|>\n\n"