Together.ai provider added (#654)
* Together.ai provider added

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
MahdiDavari and pre-commit-ci[bot] authored Mar 5, 2024
1 parent e3cd019 commit e7c4309
Showing 3 changed files with 43 additions and 0 deletions.
1 change: 1 addition & 0 deletions packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
@@ -29,6 +29,7 @@
    OpenAIProvider,
    QianfanProvider,
    SmEndpointProvider,
    TogetherAIProvider,
)


39 changes: 39 additions & 0 deletions packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -37,6 +37,7 @@
    HuggingFaceHub,
    OpenAI,
    SagemakerEndpoint,
    Together,
)

# this is necessary because `langchain.pydantic_v1.main` does not include
@@ -853,6 +854,44 @@ def allows_concurrency(self):
        return "anthropic" not in self.model_id


class TogetherAIProvider(BaseProvider, Together):
    id = "togetherai"
    name = "Together AI"
    model_id_key = "model"
    models = [
        "Austism/chronos-hermes-13b",
        "DiscoResearch/DiscoLM-mixtral-8x7b-v2",
        "EleutherAI/llemma_7b",
        "Gryphe/MythoMax-L2-13b",
        "Meta-Llama/Llama-Guard-7b",
        "Nexusflow/NexusRaven-V2-13B",
        "NousResearch/Nous-Capybara-7B-V1p9",
        "NousResearch/Nous-Hermes-2-Yi-34B",
        "NousResearch/Nous-Hermes-Llama2-13b",
        "NousResearch/Nous-Hermes-Llama2-70b",
    ]
    pypi_package_deps = ["together"]
    auth_strategy = EnvAuthStrategy(name="TOGETHER_API_KEY")

    def __init__(self, **kwargs):
        model = kwargs.get("model_id")

        if model not in self.models:
            kwargs["responses"] = [
                "Model not supported! Please check model list with %ai list"
            ]

        super().__init__(**kwargs)

    def get_prompt_template(self, format) -> PromptTemplate:
        if format == "code":
            return PromptTemplate.from_template(
                "{prompt}\n\nProduce output as source code only, "
                "with no text or explanation before or after it."
            )
        return super().get_prompt_template(format)


# Baidu QianfanChat provider. temporarily living as a separate class until
class QianfanProvider(BaseProvider, QianfanChatEndpoint):
id = "qianfan"
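For context (not part of this commit), a minimal sketch of exercising the new TogetherAIProvider directly, assuming jupyter-ai-magics with this change and the `together` package are installed, and TOGETHER_API_KEY is exported so the EnvAuthStrategy above can resolve the key:

# Sketch only, not from this commit: instantiate the provider and inspect
# the stricter prompt template it substitutes for "code"-formatted output.
from jupyter_ai_magics import TogetherAIProvider

# model_id should be one of the entries in the `models` allow-list above;
# anything else triggers the __init__ fallback to a canned response.
provider = TogetherAIProvider(model_id="Gryphe/MythoMax-L2-13b")

# "code" returns the template defined in get_prompt_template(); any other
# format defers to BaseProvider.
template = provider.get_prompt_template("code")
print(template.format(prompt="Reverse a linked list in Python."))

Note that an unrecognized model_id does not raise: per the __init__ override above, the provider swaps in the "Model not supported!" response and points the user at %ai list.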
3 changes: 3 additions & 0 deletions packages/jupyter-ai-magics/pyproject.toml
@@ -46,6 +46,8 @@ all = [
"openai~=1.6.1",
"boto3",
"qianfan",
"together",

]

[project.entry-points."jupyter_ai.model_providers"]
@@ -63,6 +65,7 @@ anthropic-chat = "jupyter_ai_magics:ChatAnthropicProvider"
amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider"
qianfan = "jupyter_ai_magics:QianfanProvider"
nvidia-chat = "jupyter_ai_magics.partner_providers.nvidia:ChatNVIDIAProvider"
together-ai = "jupyter_ai_magics:TogetherAIProvider"

[project.entry-points."jupyter_ai.embeddings_model_providers"]
bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider"
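The [project.entry-points."jupyter_ai.model_providers"] table above is how jupyter-ai discovers providers at runtime; the new together-ai key maps to the class added in providers.py. As a quick sanity check (standard-library importlib.metadata, not part of this commit), the registration can be listed after installation:

# Not from this commit: enumerate the entry-point group that jupyter-ai
# scans for model providers and confirm the new registration.
from importlib.metadata import entry_points

# Python 3.10+ keyword filtering; on 3.8/3.9 use
# entry_points()["jupyter_ai.model_providers"] instead.
for ep in entry_points(group="jupyter_ai.model_providers"):
    print(ep.name, "->", ep.value)

# Expected to include, once this change is installed:
#   together-ai -> jupyter_ai_magics:TogetherAIProvider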
