Backport PR #654: Together.ai provider added
MahdiDavari authored and meeseeksmachine committed Mar 5, 2024
1 parent a6a276d commit af465b5
Showing 3 changed files with 43 additions and 0 deletions.
1 change: 1 addition & 0 deletions packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
@@ -29,6 +29,7 @@
    OpenAIProvider,
    QianfanProvider,
    SmEndpointProvider,
    TogetherAIProvider,
)


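The one-line change above re-exports the new provider from the package root, which is the path the `together-ai` entry point registered in pyproject.toml (below) resolves to. A minimal sketch, assuming jupyter-ai-magics is installed with this change applied:

    # The export added above makes the class importable from the package root;
    # the printed values come straight from the class attributes in providers.py.
    from jupyter_ai_magics import TogetherAIProvider

    print(TogetherAIProvider.id)    # togetherai
    print(TogetherAIProvider.name)  # Together AI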
39 changes: 39 additions & 0 deletions packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -37,6 +37,7 @@
    HuggingFaceHub,
    OpenAI,
    SagemakerEndpoint,
    Together,
)

# this is necessary because `langchain.pydantic_v1.main` does not include
@@ -804,6 +805,44 @@ def allows_concurrency(self):
        return not "anthropic" in self.model_id


class TogetherAIProvider(BaseProvider, Together):
    id = "togetherai"
    name = "Together AI"
    model_id_key = "model"
    models = [
        "Austism/chronos-hermes-13b",
        "DiscoResearch/DiscoLM-mixtral-8x7b-v2",
        "EleutherAI/llemma_7b",
        "Gryphe/MythoMax-L2-13b",
        "Meta-Llama/Llama-Guard-7b",
        "Nexusflow/NexusRaven-V2-13B",
        "NousResearch/Nous-Capybara-7B-V1p9",
        "NousResearch/Nous-Hermes-2-Yi-34B",
        "NousResearch/Nous-Hermes-Llama2-13b",
        "NousResearch/Nous-Hermes-Llama2-70b",
    ]
    pypi_package_deps = ["together"]
    auth_strategy = EnvAuthStrategy(name="TOGETHER_API_KEY")

    def __init__(self, **kwargs):
        model = kwargs.get("model_id")

        if model not in self.models:
            kwargs["responses"] = [
                "Model not supported! Please check model list with %ai list"
            ]

        super().__init__(**kwargs)

    def get_prompt_template(self, format) -> PromptTemplate:
        if format == "code":
            return PromptTemplate.from_template(
                "{prompt}\n\nProduce output as source code only, "
                "with no text or explanation before or after it."
            )
        return super().get_prompt_template(format)


# Baidu QianfanChat provider. temporarily living as a separate class until
class QianfanProvider(BaseProvider, QianfanChatEndpoint):
    id = "qianfan"
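Before the packaging changes below, a hedged sketch of exercising the new TogetherAIProvider directly, assuming the `together` package is installed and TOGETHER_API_KEY is exported (matching the EnvAuthStrategy above); in normal use Jupyter AI constructs providers through their entry points rather than by hand:

    from jupyter_ai_magics.providers import TogetherAIProvider

    # "NousResearch/Nous-Hermes-Llama2-13b" is one of the ids in the models list
    # above; an unknown id would instead get the "Model not supported!" response
    # configured in __init__.
    provider = TogetherAIProvider(model_id="NousResearch/Nous-Hermes-Llama2-13b")

    # The "code" format wraps the prompt so the model is asked to return source
    # code only, as defined in get_prompt_template above.
    template = provider.get_prompt_template("code")
    print(template.format(prompt="Reverse a string."))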
3 changes: 3 additions & 0 deletions packages/jupyter-ai-magics/pyproject.toml
@@ -46,6 +46,8 @@ all = [
"openai~=1.6.1",
"boto3",
"qianfan",
"together",

]

[project.entry-points."jupyter_ai.model_providers"]
@@ -63,6 +65,7 @@ anthropic-chat = "jupyter_ai_magics:ChatAnthropicProvider"
amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider"
qianfan = "jupyter_ai_magics:QianfanProvider"
nvidia-chat = "jupyter_ai_magics.partner_providers.nvidia:ChatNVIDIAProvider"
together-ai = "jupyter_ai_magics:TogetherAIProvider"

[project.entry-points."jupyter_ai.embeddings_model_providers"]
bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider"
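With the `together` dependency and the `together-ai` entry point registered above, the provider is reachable from the %ai magics. A hedged notebook sketch, assuming the package is installed with these changes and TOGETHER_API_KEY is set; `%ai list` is the same command the error message in providers.py points users to:

    # First cell: load the magics and check that "togetherai" and its models appear.
    %load_ext jupyter_ai_magics
    %ai list

and then, in a separate cell (the `%%ai` cell magic must be the cell's first line):

    %%ai togetherai:NousResearch/Nous-Hermes-Llama2-13b
    Explain what a Python generator is.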
