diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
index a00d4877c..a8f4225de 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
@@ -29,6 +29,7 @@
     OpenAIProvider,
     QianfanProvider,
     SmEndpointProvider,
+    TogetherAIProvider,
 )
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index b1a8b1950..f88f65f6f 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -37,6 +37,7 @@
     HuggingFaceHub,
     OpenAI,
     SagemakerEndpoint,
+    Together,
 )
 
 # this is necessary because `langchain.pydantic_v1.main` does not include
@@ -804,6 +805,44 @@
         return not "anthropic" in self.model_id
 
 
+class TogetherAIProvider(BaseProvider, Together):
+    id = "togetherai"
+    name = "Together AI"
+    model_id_key = "model"
+    models = [
+        "Austism/chronos-hermes-13b",
+        "DiscoResearch/DiscoLM-mixtral-8x7b-v2",
+        "EleutherAI/llemma_7b",
+        "Gryphe/MythoMax-L2-13b",
+        "Meta-Llama/Llama-Guard-7b",
+        "Nexusflow/NexusRaven-V2-13B",
+        "NousResearch/Nous-Capybara-7B-V1p9",
+        "NousResearch/Nous-Hermes-2-Yi-34B",
+        "NousResearch/Nous-Hermes-Llama2-13b",
+        "NousResearch/Nous-Hermes-Llama2-70b",
+    ]
+    pypi_package_deps = ["together"]
+    auth_strategy = EnvAuthStrategy(name="TOGETHER_API_KEY")
+
+    def __init__(self, **kwargs):
+        model = kwargs.get("model_id")
+
+        if model not in self.models:
+            kwargs["responses"] = [
+                "Model not supported! Please check model list with %ai list"
+            ]
+
+        super().__init__(**kwargs)
+
+    def get_prompt_template(self, format) -> PromptTemplate:
+        if format == "code":
+            return PromptTemplate.from_template(
+                "{prompt}\n\nProduce output as source code only, "
+                "with no text or explanation before or after it."
+            )
+        return super().get_prompt_template(format)
+
+
 # Baidu QianfanChat provider. temporarily living as a separate class until
 class QianfanProvider(BaseProvider, QianfanChatEndpoint):
     id = "qianfan"
diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml
index 377776fba..ec0af6ba2 100644
--- a/packages/jupyter-ai-magics/pyproject.toml
+++ b/packages/jupyter-ai-magics/pyproject.toml
@@ -46,6 +46,8 @@ all = [
     "openai~=1.6.1",
     "boto3",
     "qianfan",
+    "together",
+
 ]
 
 [project.entry-points."jupyter_ai.model_providers"]
@@ -63,6 +65,7 @@ anthropic-chat = "jupyter_ai_magics:ChatAnthropicProvider"
 amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider"
 qianfan = "jupyter_ai_magics:QianfanProvider"
 nvidia-chat = "jupyter_ai_magics.partner_providers.nvidia:ChatNVIDIAProvider"
+together-ai = "jupyter_ai_magics:TogetherAIProvider"
 
 [project.entry-points."jupyter_ai.embeddings_model_providers"]
 bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider"