From c98ff5ee1c4a20441b745194ed74cc63ef853401 Mon Sep 17 00:00:00 2001 From: "David L. Qiu" Date: Wed, 24 Jul 2024 15:17:56 -0700 Subject: [PATCH 1/9] migrate to langchain-aws --- .../jupyter_ai_magics/__init__.py | 4 - .../jupyter_ai_magics/embedding_providers.py | 12 -- .../partner_providers/aws.py | 167 ++++++++++++++++++ .../jupyter_ai_magics/providers.py | 156 +--------------- packages/jupyter-ai-magics/pyproject.toml | 9 +- 5 files changed, 174 insertions(+), 174 deletions(-) create mode 100644 packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py index f596c08cd..f43dad09d 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py @@ -3,7 +3,6 @@ # expose embedding model providers on the package root from .embedding_providers import ( BaseEmbeddingsProvider, - BedrockEmbeddingsProvider, GPT4AllEmbeddingsProvider, HfHubEmbeddingsProvider, OllamaEmbeddingsProvider, @@ -20,13 +19,10 @@ from .providers import ( AI21Provider, BaseProvider, - BedrockChatProvider, - BedrockProvider, GPT4AllProvider, HfHubProvider, OllamaProvider, QianfanProvider, - SmEndpointProvider, TogetherAIProvider, ) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py index 6a175193b..f1abc7ed1 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py @@ -2,14 +2,12 @@ from jupyter_ai_magics.providers import ( AuthStrategy, - AwsAuthStrategy, EnvAuthStrategy, Field, MultiEnvAuthStrategy, ) from langchain.pydantic_v1 import BaseModel, Extra from langchain_community.embeddings import ( - BedrockEmbeddings, GPT4AllEmbeddings, HuggingFaceHubEmbeddings, OllamaEmbeddings, @@ -93,16 +91,6 @@ class HfHubEmbeddingsProvider(BaseEmbeddingsProvider, HuggingFaceHubEmbeddings): registry = True -# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html -class BedrockEmbeddingsProvider(BaseEmbeddingsProvider, BedrockEmbeddings): - id = "bedrock" - name = "Bedrock" - models = ["amazon.titan-embed-text-v1", "amazon.titan-embed-text-v2:0"] - model_id_key = "model_id" - pypi_package_deps = ["boto3"] - auth_strategy = AwsAuthStrategy() - - class GPT4AllEmbeddingsProvider(BaseEmbeddingsProvider, GPT4AllEmbeddings): def __init__(self, **kwargs): from gpt4all import GPT4All diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py new file mode 100644 index 000000000..e1faa5caf --- /dev/null +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -0,0 +1,167 @@ +from typing import Any, Coroutine, Dict +import copy +import json +from jsonpath_ng import parse + +from langchain_aws import Bedrock, ChatBedrock, SagemakerEndpoint, BedrockEmbeddings +from langchain_aws.llms.sagemaker_endpoint import LLMContentHandler +from langchain_core.outputs import LLMResult + +from ..providers import BaseProvider, AwsAuthStrategy, TextField, MultilineTextField +from ..embedding_providers import BaseEmbeddingsProvider + +# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html +class BedrockProvider(BaseProvider, Bedrock): + id = "bedrock" + name = "Amazon 
Bedrock" + models = [ + "amazon.titan-text-express-v1", + "amazon.titan-text-lite-v1", + "ai21.j2-ultra-v1", + "ai21.j2-mid-v1", + "cohere.command-light-text-v14", + "cohere.command-text-v14", + "cohere.command-r-v1:0", + "cohere.command-r-plus-v1:0", + "meta.llama2-13b-chat-v1", + "meta.llama2-70b-chat-v1", + "meta.llama3-8b-instruct-v1:0", + "meta.llama3-70b-instruct-v1:0", + "meta.llama3-1-8b-instruct-v1:0", + "meta.llama3-1-70b-instruct-v1:0", + "mistral.mistral-7b-instruct-v0:2", + "mistral.mixtral-8x7b-instruct-v0:1", + "mistral.mistral-large-2402-v1:0", + ] + model_id_key = "model_id" + pypi_package_deps = ["langchain-aws"] + auth_strategy = AwsAuthStrategy() + fields = [ + TextField( + key="credentials_profile_name", + label="AWS profile (optional)", + format="text", + ), + TextField(key="region_name", label="Region name (optional)", format="text"), + ] + + async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: + return await self._call_in_executor(*args, **kwargs) + + +# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html +class BedrockChatProvider(BaseProvider, ChatBedrock): + id = "bedrock-chat" + name = "Amazon Bedrock Chat" + models = [ + "anthropic.claude-v2", + "anthropic.claude-v2:1", + "anthropic.claude-instant-v1", + "anthropic.claude-3-sonnet-20240229-v1:0", + "anthropic.claude-3-haiku-20240307-v1:0", + "anthropic.claude-3-opus-20240229-v1:0", + "anthropic.claude-3-5-sonnet-20240620-v1:0", + ] + model_id_key = "model_id" + pypi_package_deps = ["langchain-aws"] + auth_strategy = AwsAuthStrategy() + fields = [ + TextField( + key="credentials_profile_name", + label="AWS profile (optional)", + format="text", + ), + TextField(key="region_name", label="Region name (optional)", format="text"), + ] + + async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: + return await self._call_in_executor(*args, **kwargs) + + async def _agenerate(self, *args, **kwargs) -> Coroutine[Any, Any, LLMResult]: + return await self._generate_in_executor(*args, **kwargs) + + @property + def allows_concurrency(self): + return not "anthropic" in self.model_id + + +# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html +class BedrockEmbeddingsProvider(BaseEmbeddingsProvider, BedrockEmbeddings): + id = "bedrock" + name = "Bedrock" + models = ["amazon.titan-embed-text-v1", "amazon.titan-embed-text-v2:0"] + model_id_key = "model_id" + pypi_package_deps = ["langchain-aws"] + auth_strategy = AwsAuthStrategy() + + +class JsonContentHandler(LLMContentHandler): + content_type = "application/json" + accepts = "application/json" + + def __init__(self, request_schema, response_path): + self.request_schema = json.loads(request_schema) + self.response_path = response_path + self.response_parser = parse(response_path) + + def replace_values(self, old_val, new_val, d: Dict[str, Any]): + """Replaces values of a dictionary recursively.""" + for key, val in d.items(): + if val == old_val: + d[key] = new_val + if isinstance(val, dict): + self.replace_values(old_val, new_val, val) + + return d + + def transform_input(self, prompt: str, model_kwargs: Dict) -> bytes: + request_obj = copy.deepcopy(self.request_schema) + self.replace_values("", prompt, request_obj) + request = json.dumps(request_obj).encode("utf-8") + return request + + def transform_output(self, output: bytes) -> str: + response_json = json.loads(output.read().decode("utf-8")) + matches = self.response_parser.find(response_json) + return matches[0].value 
+ + +class SmEndpointProvider(BaseProvider, SagemakerEndpoint): + id = "sagemaker-endpoint" + name = "SageMaker endpoint" + models = ["*"] + model_id_key = "endpoint_name" + model_id_label = "Endpoint name" + # This all needs to be on one line of markdown, for use in a table + help = ( + "Specify an endpoint name as the model ID. " + "In addition, you must specify a region name, request schema, and response path. " + "For more information, see the documentation about [SageMaker endpoints deployment](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html) " + "and about [using magic commands with SageMaker endpoints](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#using-magic-commands-with-sagemaker-endpoints)." + ) + + pypi_package_deps = ["langchain-aws"] + auth_strategy = AwsAuthStrategy() + registry = True + fields = [ + TextField(key="region_name", label="Region name (required)", format="text"), + MultilineTextField( + key="request_schema", label="Request schema (required)", format="json" + ), + TextField( + key="response_path", label="Response path (required)", format="jsonpath" + ), + ] + + def __init__(self, *args, **kwargs): + request_schema = kwargs.pop("request_schema") + response_path = kwargs.pop("response_path") + content_handler = JsonContentHandler( + request_schema=request_schema, response_path=response_path + ) + + super().__init__(*args, **kwargs, content_handler=content_handler) + + async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: + return await self._call_in_executor(*args, **kwargs) + diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py index 02ea8d013..551db8bbc 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py @@ -1,6 +1,5 @@ import asyncio import base64 -import copy import functools import io import json @@ -18,7 +17,6 @@ Union, ) -from jsonpath_ng import parse from langchain.prompts import ( ChatPromptTemplate, HumanMessagePromptTemplate, @@ -26,22 +24,18 @@ PromptTemplate, SystemMessagePromptTemplate, ) -from langchain.pydantic_v1 import BaseModel, Extra, root_validator +from langchain.pydantic_v1 import BaseModel, Extra from langchain.schema import LLMResult from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import Runnable -from langchain.utils import get_from_dict_or_env -from langchain_community.chat_models import BedrockChat, QianfanChatEndpoint +from langchain_community.chat_models import QianfanChatEndpoint from langchain_community.llms import ( AI21, - Bedrock, GPT4All, HuggingFaceEndpoint, Ollama, - SagemakerEndpoint, Together, ) -from langchain_community.llms.sagemaker_endpoint import LLMContentHandler from langchain_core.language_models.chat_models import BaseChatModel from langchain_core.language_models.llms import BaseLLM @@ -710,152 +704,6 @@ class OllamaProvider(BaseProvider, Ollama): ] -class JsonContentHandler(LLMContentHandler): - content_type = "application/json" - accepts = "application/json" - - def __init__(self, request_schema, response_path): - self.request_schema = json.loads(request_schema) - self.response_path = response_path - self.response_parser = parse(response_path) - - def replace_values(self, old_val, new_val, d: Dict[str, Any]): - """Replaces values of a dictionary recursively.""" - for key, val in d.items(): - if val == old_val: - d[key] = new_val - if isinstance(val, 
dict): - self.replace_values(old_val, new_val, val) - - return d - - def transform_input(self, prompt: str, model_kwargs: Dict) -> bytes: - request_obj = copy.deepcopy(self.request_schema) - self.replace_values("", prompt, request_obj) - request = json.dumps(request_obj).encode("utf-8") - return request - - def transform_output(self, output: bytes) -> str: - response_json = json.loads(output.read().decode("utf-8")) - matches = self.response_parser.find(response_json) - return matches[0].value - - -class SmEndpointProvider(BaseProvider, SagemakerEndpoint): - id = "sagemaker-endpoint" - name = "SageMaker endpoint" - models = ["*"] - model_id_key = "endpoint_name" - model_id_label = "Endpoint name" - # This all needs to be on one line of markdown, for use in a table - help = ( - "Specify an endpoint name as the model ID. " - "In addition, you must specify a region name, request schema, and response path. " - "For more information, see the documentation about [SageMaker endpoints deployment](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html) " - "and about [using magic commands with SageMaker endpoints](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#using-magic-commands-with-sagemaker-endpoints)." - ) - - pypi_package_deps = ["boto3"] - auth_strategy = AwsAuthStrategy() - registry = True - fields = [ - TextField(key="region_name", label="Region name (required)", format="text"), - MultilineTextField( - key="request_schema", label="Request schema (required)", format="json" - ), - TextField( - key="response_path", label="Response path (required)", format="jsonpath" - ), - ] - - def __init__(self, *args, **kwargs): - request_schema = kwargs.pop("request_schema") - response_path = kwargs.pop("response_path") - content_handler = JsonContentHandler( - request_schema=request_schema, response_path=response_path - ) - - super().__init__(*args, **kwargs, content_handler=content_handler) - - async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: - return await self._call_in_executor(*args, **kwargs) - - -# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html -class BedrockProvider(BaseProvider, Bedrock): - id = "bedrock" - name = "Amazon Bedrock" - models = [ - "amazon.titan-text-express-v1", - "amazon.titan-text-lite-v1", - "ai21.j2-ultra-v1", - "ai21.j2-mid-v1", - "cohere.command-light-text-v14", - "cohere.command-text-v14", - "cohere.command-r-v1:0", - "cohere.command-r-plus-v1:0", - "meta.llama2-13b-chat-v1", - "meta.llama2-70b-chat-v1", - "meta.llama3-8b-instruct-v1:0", - "meta.llama3-70b-instruct-v1:0", - "meta.llama3-1-8b-instruct-v1:0", - "meta.llama3-1-70b-instruct-v1:0", - "mistral.mistral-7b-instruct-v0:2", - "mistral.mixtral-8x7b-instruct-v0:1", - "mistral.mistral-large-2402-v1:0", - ] - model_id_key = "model_id" - pypi_package_deps = ["boto3"] - auth_strategy = AwsAuthStrategy() - fields = [ - TextField( - key="credentials_profile_name", - label="AWS profile (optional)", - format="text", - ), - TextField(key="region_name", label="Region name (optional)", format="text"), - ] - - async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: - return await self._call_in_executor(*args, **kwargs) - - -# See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html -class BedrockChatProvider(BaseProvider, BedrockChat): - id = "bedrock-chat" - name = "Amazon Bedrock Chat" - models = [ - "anthropic.claude-v2", - "anthropic.claude-v2:1", - 
"anthropic.claude-instant-v1", - "anthropic.claude-3-sonnet-20240229-v1:0", - "anthropic.claude-3-haiku-20240307-v1:0", - "anthropic.claude-3-opus-20240229-v1:0", - "anthropic.claude-3-5-sonnet-20240620-v1:0", - ] - model_id_key = "model_id" - pypi_package_deps = ["boto3"] - auth_strategy = AwsAuthStrategy() - fields = [ - TextField( - key="credentials_profile_name", - label="AWS profile (optional)", - format="text", - ), - TextField(key="region_name", label="Region name (optional)", format="text"), - ] - - async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: - return await self._call_in_executor(*args, **kwargs) - - async def _agenerate(self, *args, **kwargs) -> Coroutine[Any, Any, LLMResult]: - return await self._generate_in_executor(*args, **kwargs) - - @property - def allows_concurrency(self): - return not "anthropic" in self.model_id - - class TogetherAIProvider(BaseProvider, Together): id = "togetherai" name = "Together AI" diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml index 42acc4ad7..c73df8aea 100644 --- a/packages/jupyter-ai-magics/pyproject.toml +++ b/packages/jupyter-ai-magics/pyproject.toml @@ -42,6 +42,7 @@ all = [ "huggingface_hub", "ipywidgets", "langchain_anthropic", + "langchain_aws", "langchain_cohere", "langchain_google_genai", "langchain_mistralai", @@ -64,9 +65,9 @@ ollama = "jupyter_ai_magics:OllamaProvider" openai = "jupyter_ai_magics.partner_providers.openai:OpenAIProvider" openai-chat = "jupyter_ai_magics.partner_providers.openai:ChatOpenAIProvider" azure-chat-openai = "jupyter_ai_magics.partner_providers.openai:AzureChatOpenAIProvider" -sagemaker-endpoint = "jupyter_ai_magics:SmEndpointProvider" -amazon-bedrock = "jupyter_ai_magics:BedrockProvider" -amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider" +sagemaker-endpoint = "jupyter_ai_magics.partner_providers.aws:SmEndpointProvider" +amazon-bedrock = "jupyter_ai_magics.partner_providers.aws:BedrockProvider" +amazon-bedrock-chat = "jupyter_ai_magics.partner_providers.aws:BedrockChatProvider" qianfan = "jupyter_ai_magics:QianfanProvider" nvidia-chat = "jupyter_ai_magics.partner_providers.nvidia:ChatNVIDIAProvider" together-ai = "jupyter_ai_magics:TogetherAIProvider" @@ -74,7 +75,7 @@ gemini = "jupyter_ai_magics.partner_providers.gemini:GeminiProvider" mistralai = "jupyter_ai_magics.partner_providers.mistralai:MistralAIProvider" [project.entry-points."jupyter_ai.embeddings_model_providers"] -bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider" +bedrock = "jupyter_ai_magics.partner_providers.aws:BedrockEmbeddingsProvider" cohere = "jupyter_ai_magics.partner_providers.cohere:CohereEmbeddingsProvider" mistralai = "jupyter_ai_magics.partner_providers.mistralai:MistralAIEmbeddingsProvider" gpt4all = "jupyter_ai_magics:GPT4AllEmbeddingsProvider" From a4775385087f6b00f8f9e5bcddfba293e21abc11 Mon Sep 17 00:00:00 2001 From: "David L. 
Qiu" Date: Wed, 24 Jul 2024 15:27:37 -0700 Subject: [PATCH 2/9] pre-commit --- .../jupyter_ai_magics/partner_providers/aws.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index e1faa5caf..75a36e040 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -1,14 +1,15 @@ -from typing import Any, Coroutine, Dict import copy import json -from jsonpath_ng import parse +from typing import Any, Coroutine, Dict -from langchain_aws import Bedrock, ChatBedrock, SagemakerEndpoint, BedrockEmbeddings +from jsonpath_ng import parse +from langchain_aws import Bedrock, BedrockEmbeddings, ChatBedrock, SagemakerEndpoint from langchain_aws.llms.sagemaker_endpoint import LLMContentHandler from langchain_core.outputs import LLMResult -from ..providers import BaseProvider, AwsAuthStrategy, TextField, MultilineTextField from ..embedding_providers import BaseEmbeddingsProvider +from ..providers import AwsAuthStrategy, BaseProvider, MultilineTextField, TextField + # See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html class BedrockProvider(BaseProvider, Bedrock): @@ -164,4 +165,3 @@ def __init__(self, *args, **kwargs): async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]: return await self._call_in_executor(*args, **kwargs) - From 513e476814057f1ca6b8206a088ad976731017be Mon Sep 17 00:00:00 2001 From: "David L. Qiu" Date: Wed, 24 Jul 2024 15:28:33 -0700 Subject: [PATCH 3/9] update aws provider dependencies in docs --- docs/source/users/index.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/users/index.md b/docs/source/users/index.md index 5376c29b0..6cd1118d6 100644 --- a/docs/source/users/index.md +++ b/docs/source/users/index.md @@ -158,8 +158,8 @@ Jupyter AI supports the following model providers: | AI21 | `ai21` | `AI21_API_KEY` | `ai21` | | Anthropic | `anthropic` | `ANTHROPIC_API_KEY` | `langchain-anthropic` | | Anthropic (chat) | `anthropic-chat` | `ANTHROPIC_API_KEY` | `langchain-anthropic` | -| Bedrock | `bedrock` | N/A | `boto3` | -| Bedrock (chat) | `bedrock-chat` | N/A | `boto3` | +| Bedrock | `bedrock` | N/A | `langchain-aws` | +| Bedrock (chat) | `bedrock-chat` | N/A | `langchain-aws` | | Cohere | `cohere` | `COHERE_API_KEY` | `langchain_cohere` | | ERNIE-Bot | `qianfan` | `QIANFAN_AK`, `QIANFAN_SK` | `qianfan` | | Gemini | `gemini` | `GOOGLE_API_KEY` | `langchain-google-genai` | @@ -169,7 +169,7 @@ Jupyter AI supports the following model providers: | NVIDIA | `nvidia-chat` | `NVIDIA_API_KEY` | `langchain_nvidia_ai_endpoints` | | OpenAI | `openai` | `OPENAI_API_KEY` | `langchain-openai` | | OpenAI (chat) | `openai-chat` | `OPENAI_API_KEY` | `langchain-openai` | -| SageMaker | `sagemaker-endpoint` | N/A | `boto3` | +| SageMaker | `sagemaker-endpoint` | N/A | `langchain-aws` | The environment variable names shown above are also the names of the settings keys used when setting up the chat interface. If multiple variables are listed for a provider, **all** must be specified. 
From 3577061cfd29f3ef2b6789a156eca9be07e8b5b2 Mon Sep 17 00:00:00 2001 From: david qiu Date: Thu, 25 Jul 2024 09:04:31 -0700 Subject: [PATCH 4/9] correct SM endpoints docs URL Co-authored-by: Jason Weill <93281816+JasonWeill@users.noreply.github.com> --- .../jupyter_ai_magics/partner_providers/aws.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index 75a36e040..dfd3cc41f 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -137,7 +137,7 @@ class SmEndpointProvider(BaseProvider, SagemakerEndpoint): help = ( "Specify an endpoint name as the model ID. " "In addition, you must specify a region name, request schema, and response path. " - "For more information, see the documentation about [SageMaker endpoints deployment](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html) " + "For more information, see the documentation about [SageMaker endpoints deployment](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deploy-models.html) " "and about [using magic commands with SageMaker endpoints](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#using-magic-commands-with-sagemaker-endpoints)." ) From 7988554dbc52b0b87f3036917382b3920a491849 Mon Sep 17 00:00:00 2001 From: david qiu Date: Thu, 25 Jul 2024 11:14:42 -0700 Subject: [PATCH 5/9] add new Cohere model IDs to BedrockEmbeddings Co-authored-by: Piyush Jain --- .../jupyter_ai_magics/partner_providers/aws.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index dfd3cc41f..b56043aad 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -90,7 +90,12 @@ def allows_concurrency(self): class BedrockEmbeddingsProvider(BaseEmbeddingsProvider, BedrockEmbeddings): id = "bedrock" name = "Bedrock" - models = ["amazon.titan-embed-text-v1", "amazon.titan-embed-text-v2:0"] + models = [ + "amazon.titan-embed-text-v1", + "amazon.titan-embed-text-v2:0", + "cohere.embed-english-v3", + "cohere.embed-multilingual-v3" + ] model_id_key = "model_id" pypi_package_deps = ["langchain-aws"] auth_strategy = AwsAuthStrategy() From 78836bfc73eabbcbf3ff63b2daf0807ba5115f8e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 18:14:50 +0000 Subject: [PATCH 6/9] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../jupyter_ai_magics/partner_providers/aws.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index b56043aad..14d90f0b5 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -91,10 +91,10 @@ class BedrockEmbeddingsProvider(BaseEmbeddingsProvider, BedrockEmbeddings): id = "bedrock" name = "Bedrock" models = [ - "amazon.titan-embed-text-v1", + "amazon.titan-embed-text-v1", 
"amazon.titan-embed-text-v2:0", "cohere.embed-english-v3", - "cohere.embed-multilingual-v3" + "cohere.embed-multilingual-v3", ] model_id_key = "model_id" pypi_package_deps = ["langchain-aws"] From c3ff0eaa7e6e00611f3c2a271df2fe3dc00874d1 Mon Sep 17 00:00:00 2001 From: "David L. Qiu" Date: Thu, 25 Jul 2024 13:18:07 -0700 Subject: [PATCH 7/9] use BedrockLLM instead of Bedrock class --- .../jupyter_ai_magics/partner_providers/aws.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index 14d90f0b5..6311d50ff 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -3,7 +3,7 @@ from typing import Any, Coroutine, Dict from jsonpath_ng import parse -from langchain_aws import Bedrock, BedrockEmbeddings, ChatBedrock, SagemakerEndpoint +from langchain_aws import BedrockLLM, BedrockEmbeddings, ChatBedrock, SagemakerEndpoint from langchain_aws.llms.sagemaker_endpoint import LLMContentHandler from langchain_core.outputs import LLMResult @@ -12,7 +12,7 @@ # See model ID list here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html -class BedrockProvider(BaseProvider, Bedrock): +class BedrockProvider(BaseProvider, BedrockLLM): id = "bedrock" name = "Amazon Bedrock" models = [ From 3fd4f416dd0217b86b1e9551cb57cef204fbedff Mon Sep 17 00:00:00 2001 From: "David L. Qiu" Date: Thu, 25 Jul 2024 13:29:10 -0700 Subject: [PATCH 8/9] add Amazon, Meta, Mistral models to BedrockChatProvider --- .../jupyter_ai_magics/partner_providers/aws.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index 6311d50ff..a05368f4b 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -55,6 +55,8 @@ class BedrockChatProvider(BaseProvider, ChatBedrock): id = "bedrock-chat" name = "Amazon Bedrock Chat" models = [ + "amazon.titan-text-express-v1", + "amazon.titan-text-lite-v1", "anthropic.claude-v2", "anthropic.claude-v2:1", "anthropic.claude-instant-v1", @@ -62,6 +64,15 @@ class BedrockChatProvider(BaseProvider, ChatBedrock): "anthropic.claude-3-haiku-20240307-v1:0", "anthropic.claude-3-opus-20240229-v1:0", "anthropic.claude-3-5-sonnet-20240620-v1:0", + "meta.llama2-13b-chat-v1", + "meta.llama2-70b-chat-v1", + "meta.llama3-8b-instruct-v1:0", + "meta.llama3-70b-instruct-v1:0", + "meta.llama3-1-8b-instruct-v1:0", + "meta.llama3-1-70b-instruct-v1:0", + "mistral.mistral-7b-instruct-v0:2", + "mistral.mixtral-8x7b-instruct-v0:1", + "mistral.mistral-large-2402-v1:0", ] model_id_key = "model_id" pypi_package_deps = ["langchain-aws"] From feac5598dcbcaa9b255b115d30bdd22eb0f0194a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 20:29:22 +0000 Subject: [PATCH 9/9] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../jupyter_ai_magics/partner_providers/aws.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py index 
a05368f4b..dd95172c1 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/partner_providers/aws.py @@ -3,7 +3,7 @@ from typing import Any, Coroutine, Dict from jsonpath_ng import parse -from langchain_aws import BedrockLLM, BedrockEmbeddings, ChatBedrock, SagemakerEndpoint +from langchain_aws import BedrockEmbeddings, BedrockLLM, ChatBedrock, SagemakerEndpoint from langchain_aws.llms.sagemaker_endpoint import LLMContentHandler from langchain_core.outputs import LLMResult
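As a closing illustration of the SageMaker endpoint plumbing that moved into `partner_providers/aws.py`, the sketch below exercises `JsonContentHandler` on its own. It is not part of the patches: the request schema and response shape are hypothetical, chosen only to show how the empty-string placeholder is swapped for the prompt and how the JSONPath response path extracts the completion. It assumes `jupyter-ai-magics` with these patches applied and `langchain-aws` installed.

```python
import io
import json

from jupyter_ai_magics.partner_providers.aws import JsonContentHandler

# Hypothetical schema: the empty string marks where transform_input substitutes the prompt.
handler = JsonContentHandler(
    request_schema='{"inputs": "", "parameters": {"max_new_tokens": 256}}',
    response_path="$.generated_text",
)

body = handler.transform_input("What is Amazon SageMaker?", model_kwargs={})
print(body)
# b'{"inputs": "What is Amazon SageMaker?", "parameters": {"max_new_tokens": 256}}'

# transform_output reads from a stream, like the Body object returned by the SageMaker runtime.
fake_body = io.BytesIO(json.dumps({"generated_text": "A managed ML service."}).encode("utf-8"))
print(handler.transform_output(fake_body))
# A managed ML service.
```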