From 9518515de0514d9c5fa524cc79496993a504e25d Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sat, 19 Aug 2023 18:02:21 -0700
Subject: [PATCH] Extra header configuration for openrouter.ai, refs #165

---
 docs/other-models.md                 | 14 ++++++++++++++
 llm/default_plugins/openai_models.py | 26 +++++++++++++++++---------
 2 files changed, 31 insertions(+), 9 deletions(-)

diff --git a/docs/other-models.md b/docs/other-models.md
index 767cb36a..398ee314 100644
--- a/docs/other-models.md
+++ b/docs/other-models.md
@@ -96,3 +96,17 @@ And confirm they were logged correctly with:
 ```bash
 llm logs -n 1
 ```
+
+### Extra HTTP headers
+
+Some providers such as [openrouter.ai](https://openrouter.ai/docs) may require additional HTTP headers to be set. You can set these using the `headers:` key like this:
+
+```yaml
+- model_id: claude
+  model_name: anthropic/claude-2
+  api_base: "https://openrouter.ai/api/v1"
+  api_key_name: openrouter
+  headers:
+    HTTP-Referer: "https://llm.datasette.io/"
+    X-Title: LLM
+```
\ No newline at end of file
diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py
index f331c228..4dc9d57e 100644
--- a/llm/default_plugins/openai_models.py
+++ b/llm/default_plugins/openai_models.py
@@ -23,16 +23,19 @@ def register_models(register):
         return
     with open(extra_path) as f:
         extra_models = yaml.safe_load(f)
-    for model in extra_models:
-        model_id = model["model_id"]
-        aliases = model.get("aliases", [])
-        model_name = model["model_name"]
-        api_base = model.get("api_base")
-        chat_model = Chat(model_id, model_name=model_name, api_base=api_base)
+    for extra_model in extra_models:
+        model_id = extra_model["model_id"]
+        aliases = extra_model.get("aliases", [])
+        model_name = extra_model["model_name"]
+        api_base = extra_model.get("api_base")
+        headers = extra_model.get("headers")
+        chat_model = Chat(
+            model_id, model_name=model_name, api_base=api_base, headers=headers
+        )
         if api_base:
             chat_model.needs_key = None
-        if model.get("api_key_name"):
-            chat_model.needs_key = model["api_key_name"]
+        if extra_model.get("api_key_name"):
+            chat_model.needs_key = extra_model["api_key_name"]
         register(
             chat_model,
             aliases=aliases,
@@ -170,11 +173,14 @@ def validate_logit_bias(cls, logit_bias):
 
             return validated_logit_bias
 
-    def __init__(self, model_id, key=None, model_name=None, api_base=None):
+    def __init__(
+        self, model_id, key=None, model_name=None, api_base=None, headers=None
+    ):
         self.model_id = model_id
         self.key = key
         self.model_name = model_name
         self.api_base = api_base
+        self.headers = headers
 
     def __str__(self):
         return "OpenAI Chat: {}".format(self.model_id)
@@ -210,6 +216,8 @@ def execute(self, prompt, stream, response, conversation=None):
             # OpenAI-compatible models don't need a key, but the
             # openai client library requires one
             kwargs["api_key"] = "DUMMY_KEY"
+        if self.headers:
+            kwargs["headers"] = self.headers
         if stream:
             completion = openai.ChatCompletion.create(
                 model=self.model_name or self.model_id,
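
For reference, here is a minimal sketch (not part of the patch) of how the YAML example added to the docs maps onto the values the updated `register_models()` loop reads. The inline `EXAMPLE_YAML` string and the `print()` call are illustrative only; the real plugin loads these entries from the `extra_path` file shown in the diff, and then, per the second and fourth hunks, the `headers` dict travels through `Chat(...)` into `kwargs["headers"]` on the `openai.ChatCompletion.create()` call.

```python
# Illustrative sketch only -- not part of the patch. It parses the documented
# YAML example the same way the patched register_models() loop does.
import yaml  # PyYAML, already used by the plugin

EXAMPLE_YAML = """
- model_id: claude
  model_name: anthropic/claude-2
  api_base: "https://openrouter.ai/api/v1"
  api_key_name: openrouter
  headers:
    HTTP-Referer: "https://llm.datasette.io/"
    X-Title: LLM
"""

for extra_model in yaml.safe_load(EXAMPLE_YAML):
    # Same keys the patched loop pulls out of each entry:
    model_id = extra_model["model_id"]
    model_name = extra_model["model_name"]
    api_base = extra_model.get("api_base")
    api_key_name = extra_model.get("api_key_name")
    headers = extra_model.get("headers")  # None when the key is omitted
    print(model_id, api_key_name, headers)

# Output:
# claude openrouter {'HTTP-Referer': 'https://llm.datasette.io/', 'X-Title': 'LLM'}
```

With an entry like this in place, `Chat(model_id, model_name=model_name, api_base=api_base, headers=headers)` carries the configured headers into `execute()`, where they are forwarded only when `self.headers` is set, so existing configurations without a `headers:` key behave exactly as before.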