From f459d9ced3d3c6d78f8f29416bef272defab319c Mon Sep 17 00:00:00 2001
From: Mahdi Davari <8115151+MahdiDavari@users.noreply.github.com>
Date: Sun, 18 Feb 2024 04:20:46 +0000
Subject: [PATCH] Together.ai provider added

---
 .../jupyter_ai_magics/__init__.py         |  1 +
 .../jupyter_ai_magics/providers.py        | 36 +++++++++++++++++++
 packages/jupyter-ai-magics/pyproject.toml |  3 ++
 3 files changed, 40 insertions(+)

diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
index a00d4877c..a8f4225de 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
@@ -29,6 +29,7 @@
     OpenAIProvider,
     QianfanProvider,
     SmEndpointProvider,
+    TogetherAIProvider,
 )
 
 
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index 2704e8f32..ff33947a5 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -31,6 +31,7 @@
 from langchain_community.llms import (
     AI21,
     Anthropic,
+    Together,
     Bedrock,
     Cohere,
     GPT4All,
@@ -853,6 +854,41 @@ def allows_concurrency(self):
         return not "anthropic" in self.model_id
 
 
+class TogetherAIProvider(BaseProvider, Together):
+    id = "togetherai"
+    name = "Together AI"
+    model_id_key = "model"
+    models = ['Austism/chronos-hermes-13b',
+              'DiscoResearch/DiscoLM-mixtral-8x7b-v2',
+              'EleutherAI/llemma_7b',
+              'Gryphe/MythoMax-L2-13b',
+              'Meta-Llama/Llama-Guard-7b',
+              'Nexusflow/NexusRaven-V2-13B',
+              'NousResearch/Nous-Capybara-7B-V1p9',
+              'NousResearch/Nous-Hermes-2-Yi-34B',
+              'NousResearch/Nous-Hermes-Llama2-13b',
+              'NousResearch/Nous-Hermes-Llama2-70b'
+              ]
+    pypi_package_deps = ["together"]
+    auth_strategy = EnvAuthStrategy(name="TOGETHER_API_KEY")
+
+    def __init__(self, **kwargs):
+        model = kwargs.get("model_id")
+
+        if model not in self.models:
+            kwargs["responses"] = ["Model not supported! Please check model list with %ai list"]
+
+        super().__init__(**kwargs)
+
+    def get_prompt_template(self, format) -> PromptTemplate:
+        if format == "code":
+            return PromptTemplate.from_template(
+                "{prompt}\n\nProduce output as source code only, "
+                "with no text or explanation before or after it."
+            )
+        return super().get_prompt_template(format)
+
+
 # Baidu QianfanChat provider. temporarily living as a separate class until
 class QianfanProvider(BaseProvider, QianfanChatEndpoint):
     id = "qianfan"
diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml
index 377776fba..ec0af6ba2 100644
--- a/packages/jupyter-ai-magics/pyproject.toml
+++ b/packages/jupyter-ai-magics/pyproject.toml
@@ -46,6 +46,8 @@ all = [
     "openai~=1.6.1",
     "boto3",
     "qianfan",
+    "together",
+
 ]
 
 [project.entry-points."jupyter_ai.model_providers"]
@@ -63,6 +65,7 @@ anthropic-chat = "jupyter_ai_magics:ChatAnthropicProvider"
 amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider"
 qianfan = "jupyter_ai_magics:QianfanProvider"
 nvidia-chat = "jupyter_ai_magics.partner_providers.nvidia:ChatNVIDIAProvider"
+together-ai = "jupyter_ai_magics:TogetherAIProvider"
 
 [project.entry-points."jupyter_ai.embeddings_model_providers"]
 bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider"