diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/ollama.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/ollama.py
index 396341705..5babc5adb 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/ollama.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/ollama.py
@@ -30,4 +30,3 @@ class OllamaEmbeddingsProvider(BaseEmbeddingsProvider, OllamaEmbeddings):
         "snowflake-arctic-embed",
     ]
     model_id_key = "model"
-
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index ca8d60e73..8a28c5251 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -29,12 +29,7 @@
 from langchain.schema.output_parser import StrOutputParser
 from langchain.schema.runnable import Runnable
 from langchain_community.chat_models import QianfanChatEndpoint
-from langchain_community.llms import (
-    AI21,
-    GPT4All,
-    HuggingFaceEndpoint,
-    Together,
-)
+from langchain_community.llms import AI21, GPT4All, HuggingFaceEndpoint, Together
 from langchain_core.language_models.chat_models import BaseChatModel
 from langchain_core.language_models.llms import BaseLLM
 
@@ -706,7 +701,6 @@ async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]:
         return await self._call_in_executor(*args, **kwargs)
 
 
-
 class TogetherAIProvider(BaseProvider, Together):
     id = "togetherai"
     name = "Together AI"
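
Reviewer note (not part of the patch): the providers.py hunk only reflows the langchain_community.llms import; both forms bind the same four names, and the other hunks only drop extra blank lines. A minimal sketch of a sanity check, assuming langchain-community is installed:

```python
# Both import styles bind the same objects; the collapsed form used in
# the patch is purely cosmetic.
from langchain_community.llms import (
    AI21,
    GPT4All,
    HuggingFaceEndpoint,
    Together,
)

multi_line = (AI21, GPT4All, HuggingFaceEndpoint, Together)

from langchain_community.llms import AI21, GPT4All, HuggingFaceEndpoint, Together

single_line = (AI21, GPT4All, HuggingFaceEndpoint, Together)

# Identical bindings, so the diff does not change runtime behavior.
assert multi_line == single_line
```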