From 98c74e7508e918e7860f2cb0b95eaa4feaa3cb32 Mon Sep 17 00:00:00 2001
From: Alexandra Stephens
Date: Mon, 8 Jan 2024 14:58:20 -0800
Subject: [PATCH 01/15] Add NVIDIA chat and embeddings providers

---
 .../jupyter_ai_magics/__init__.py            |  2 ++
 .../jupyter_ai_magics/embedding_providers.py | 10 +++++++++
 .../jupyter_ai_magics/providers.py           | 21 ++++++++++++++++++-
 packages/jupyter-ai-magics/pyproject.toml    |  3 +++
 4 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
index a00d4877c..6898332e7 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py
@@ -7,6 +7,7 @@
     CohereEmbeddingsProvider,
     GPT4AllEmbeddingsProvider,
     HfHubEmbeddingsProvider,
+    NVIDIAEmbeddingsProvider,
     OpenAIEmbeddingsProvider,
     QianfanEmbeddingsEndpointProvider,
 )
@@ -22,6 +23,7 @@
     BedrockChatProvider,
     BedrockProvider,
     ChatAnthropicProvider,
+    ChatNVIDIAProvider,
     ChatOpenAIProvider,
     CohereProvider,
     GPT4AllProvider,
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
index 75c8fa0a3..df74cb058 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
@@ -16,6 +16,7 @@
     OpenAIEmbeddings,
     QianfanEmbeddingsEndpoint,
 )
+from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings
 
 from langchain.pydantic_v1 import BaseModel, Extra
 
@@ -140,3 +141,12 @@ class QianfanEmbeddingsEndpointProvider(
     model_id_key = "model"
     pypi_package_deps = ["qianfan"]
     auth_strategy = MultiEnvAuthStrategy(names=["QIANFAN_AK", "QIANFAN_SK"])
+
+
+class NVIDIAEmbeddingsProvider(BaseEmbeddingsProvider, NVIDIAEmbeddings
+):
+    id = "nvidia"
+    name = "NVIDIA"
+    models = ["playground_nvolveqa_40k"]
+    model_id_key = "model"
+    auth_strategy = EnvAuthStrategy(name="NVIDIA_API_KEY")
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index d15afe80f..fbd7bab83 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -44,6 +44,7 @@
 from langchain.schema import LLMResult
 from langchain.utils import get_from_dict_or_env
 from langchain_community.chat_models import ChatOpenAI
+from langchain_nvidia_ai_endpoints import ChatNVIDIA
 
 
 class EnvAuthStrategy(BaseModel):
@@ -336,7 +337,6 @@ class ChatAnthropicProvider(BaseProvider, ChatAnthropic):
     def allows_concurrency(self):
         return False
 
-
 class CohereProvider(BaseProvider, Cohere):
     id = "cohere"
     name = "Cohere"
@@ -747,3 +747,22 @@ class QianfanProvider(BaseProvider, QianfanChatEndpoint):
     model_id_key = "model_name"
     pypi_package_deps = ["qianfan"]
     auth_strategy = MultiEnvAuthStrategy(names=["QIANFAN_AK", "QIANFAN_SK"])
+
+
+class ChatNVIDIAProvider(BaseProvider, ChatNVIDIA):
+    id = "nvidia-chat"
+    name = "ChatNVIDIA"
+    models = ['playground_llama2_70b',
+        'playground_nemotron_steerlm_8b',
+        'playground_mistral_7b',
+        'playground_nv_llama2_rlhf_70b',
+        'playground_llama2_13b',
+        'playground_steerlm_llama_70b',
+        'playground_llama2_code_13b',
+        'playground_yi_34b',
+        'playground_mixtral_8x7b',
+        'playground_neva_22b',
+        'playground_llama2_code_34b'
+    ]
+    model_id_key = "model"
+    auth_strategy = EnvAuthStrategy(name="NVIDIA_API_KEY")
diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml
index bd180562d..8ad9db92a 100644
--- a/packages/jupyter-ai-magics/pyproject.toml
+++ b/packages/jupyter-ai-magics/pyproject.toml
@@ -28,6 +28,7 @@ dependencies = [
     "typing_extensions>=4.5.0",
     "click~=8.0",
     "jsonpath-ng>=1.5.3,<2",
+    "langchain_nvidia_ai_endpoints",
 ]
 
 [project.optional-dependencies]
@@ -69,6 +70,7 @@ amazon-bedrock = "jupyter_ai_magics:BedrockProvider"
 anthropic-chat = "jupyter_ai_magics:ChatAnthropicProvider"
 amazon-bedrock-chat = "jupyter_ai_magics:BedrockChatProvider"
 qianfan = "jupyter_ai_magics:QianfanProvider"
+nvidia-chat = "jupyter_ai_magics:ChatNVIDIAProvider"
 
 [project.entry-points."jupyter_ai.embeddings_model_providers"]
 bedrock = "jupyter_ai_magics:BedrockEmbeddingsProvider"
@@ -77,6 +79,7 @@ gpt4all = "jupyter_ai_magics:GPT4AllEmbeddingsProvider"
 huggingface_hub = "jupyter_ai_magics:HfHubEmbeddingsProvider"
 openai = "jupyter_ai_magics:OpenAIEmbeddingsProvider"
 qianfan = "jupyter_ai_magics:QianfanEmbeddingsEndpointProvider"
+nvidia = "jupyter_ai_magics:NVIDIAEmbeddingsProvider:
 
 [tool.hatch.version]
 source = "nodejs"

From 22d38e45e2108b5e10cfebb441019ba67a29e7c0 Mon Sep 17 00:00:00 2001
From: Alexandra Stephens
Date: Mon, 8 Jan 2024 15:04:11 -0800
Subject: [PATCH 02/15] Fix typo

---
 packages/jupyter-ai-magics/pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml
index 8ad9db92a..8b65603ba 100644
--- a/packages/jupyter-ai-magics/pyproject.toml
+++ b/packages/jupyter-ai-magics/pyproject.toml
@@ -79,7 +79,7 @@ gpt4all = "jupyter_ai_magics:GPT4AllEmbeddingsProvider"
 huggingface_hub = "jupyter_ai_magics:HfHubEmbeddingsProvider"
 openai = "jupyter_ai_magics:OpenAIEmbeddingsProvider"
 qianfan = "jupyter_ai_magics:QianfanEmbeddingsEndpointProvider"
-nvidia = "jupyter_ai_magics:NVIDIAEmbeddingsProvider:
+nvidia = "jupyter_ai_magics:NVIDIAEmbeddingsProvider"
 
 [tool.hatch.version]
 source = "nodejs"

From 933b4a0cbcbc6a2c1eb1b0daeaf0f619ca55a65e Mon Sep 17 00:00:00 2001
From: Alexandra Stephens
Date: Tue, 9 Jan 2024 14:06:36 -0800
Subject: [PATCH 03/15] Some NVIDIA provider documentation

---
 docs/source/index.md       | 2 +-
 docs/source/users/index.md | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/docs/source/index.md b/docs/source/index.md
index 28d094e93..8d70538e3 100644
--- a/docs/source/index.md
+++ b/docs/source/index.md
@@ -8,7 +8,7 @@ in JupyterLab and the Jupyter Notebook. More specifically, Jupyter AI offers:
   This works anywhere the IPython kernel runs (JupyterLab, Jupyter Notebook, Google Colab, VSCode, etc.).
 * A native chat UI in JupyterLab that enables you to work with generative AI as a conversational assistant.
 * Support for a wide range of generative model providers and models
-  (AI21, Anthropic, Cohere, Hugging Face, OpenAI, SageMaker, etc.).
+  (AI21, Anthropic, Cohere, Hugging Face, OpenAI, SageMaker, NVIDIA, etc.).
 
 <img ... alt="A screenshot of Jupyter AI showing the chat interface and the magic commands" ... />
diff --git a/docs/source/users/index.md b/docs/source/users/index.md
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
+To use NVIDIA models, create a free account with the [NVIDIA NGC service](https://catalog.ngc.nvidia.com/), which hosts AI solution catalogs, containers, models, etc. Navigate to Catalog > [AI Foundation Models](https://catalog.ngc.nvidia.com/ai-foundation-models) > (Model with API endpoint). Select the API option and click Generate Key > save as `NVIDIA_API_KEY`.
+
 SageMaker endpoint names are created when you deploy a model. For more information, see
 ["Create your endpoint and deploy your model"](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html)
 in the SageMaker documentation.
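For readers following the series, the entry points registered in patch 01 and the key setup documented in patch 03 come together in the `%%ai` magics. A minimal, illustrative sketch of that workflow, not part of the patches themselves: the provider id `nvidia-chat` and the model id `playground_mixtral_8x7b` are taken from `ChatNVIDIAProvider` above, while the key value and the prompt are placeholders.

    # one notebook cell: put the NGC key into the kernel environment and load the magics
    %env NVIDIA_API_KEY=nvapi-xxxxxxxx
    %load_ext jupyter_ai_magics

    # a separate cell, which must start with the cell magic itself
    %%ai nvidia-chat:playground_mixtral_8x7b
    Explain, in two sentences, what a Jupyter kernel does.

The `<provider-id>:<model-id>` addressing is the existing jupyter_ai_magics convention; only the NVIDIA-specific ids are new here.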
From 34a4557d67ee05dc82b32f5f586775d369bdce0d Mon Sep 17 00:00:00 2001
From: Alexandra Stephens
Date: Fri, 12 Jan 2024 13:51:56 -0800
Subject: [PATCH 04/15] Update docs and naming for NVIDIA

---
 README.md                                                 | 2 +-
 docs/source/users/index.md                                | 4 ++--
 packages/jupyter-ai-magics/jupyter_ai_magics/providers.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 5943bb23e..fa3b68cd4 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ in JupyterLab and the Jupyter Notebook. More specifically, Jupyter AI offers:
   This works anywhere the IPython kernel runs (JupyterLab, Jupyter Notebook, Google Colab, VSCode, etc.).
 * A native chat UI in JupyterLab that enables you to work with generative AI as a conversational assistant.
 * Support for a wide range of generative model providers, including AI21, Anthropic, AWS, Cohere,
-  Hugging Face, and OpenAI.
+  Hugging Face, NVIDIA, and OpenAI.
 * Local model support through GPT4All, enabling use of generative AI models on
   consumer grade machines with ease and privacy.
diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index 519ef0618..d196d2618 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -133,8 +133,7 @@ Jupyter AI supports the following model providers:
 | ERNIE-Bot | `qianfan` | `QIANFAN_AK`, `QIANFAN_SK` | `qianfan` |
 | GPT4All | `gpt4all` | N/A | `gpt4all` |
 | Hugging Face Hub | `huggingface_hub` | `HUGGINGFACEHUB_API_TOKEN` | `huggingface_hub`, `ipywidgets`, `pillow` |
-| NVIDIA | `nvidia` | `NVIDIA_API_KEY` | N/A |
-| NVIDIA (chat) | `nvidia-chat` | `NVIDIA_API_KEY` | N/A |
+| NVIDIA | `nvidia-chat` | `NVIDIA_API_KEY` | N/A |
 | OpenAI | `openai` | `OPENAI_API_KEY` | `openai` |
 | OpenAI (chat) | `openai-chat` | `OPENAI_API_KEY` | `openai` |
 | SageMaker | `sagemaker-endpoint` | N/A | `boto3` |
@@ -519,6 +518,7 @@ We currently support the following language model providers:
 - `bedrock-chat`
 - `cohere`
 - `huggingface_hub`
+- `nvidia-chat`
 - `openai`
 - `openai-chat`
 - `sagemaker-endpoint`
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index fbd7bab83..b9b82f044 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -751,7 +751,7 @@ class QianfanProvider(BaseProvider, QianfanChatEndpoint):
 class ChatNVIDIAProvider(BaseProvider, ChatNVIDIA):
     id = "nvidia-chat"
-    name = "ChatNVIDIA"
+    name = "NVIDIA"
     models = ['playground_llama2_70b',
         'playground_nemotron_steerlm_8b',
         'playground_mistral_7b',
         'playground_nv_llama2_rlhf_70b',

From aaec540f87d77eee946d36f867b4ac3eed1a504b Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 12 Jan 2024 22:13:27 +0000
Subject: [PATCH 05/15] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../jupyter_ai_magics/embedding_providers.py |  3 +--
 .../jupyter_ai_magics/providers.py           | 26 ++++++++++---------
 2 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
index 7a7744d6e..3f109f72f 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py
@@ -142,8 +142,7 @@ class QianfanEmbeddingsEndpointProvider(
     auth_strategy = MultiEnvAuthStrategy(names=["QIANFAN_AK", "QIANFAN_SK"])
 
 
-class NVIDIAEmbeddingsProvider(BaseEmbeddingsProvider, NVIDIAEmbeddings
-):
+class NVIDIAEmbeddingsProvider(BaseEmbeddingsProvider, NVIDIAEmbeddings):
     id = "nvidia"
     name = "NVIDIA"
     models = ["playground_nvolveqa_40k"]
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index 60179a65e..062bb1863 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -360,6 +360,7 @@ class ChatAnthropicProvider(BaseProvider, ChatAnthropic):
     def allows_concurrency(self):
         return False
 
+
 class CohereProvider(BaseProvider, Cohere):
     id = "cohere"
     name = "Cohere"
@@ -775,17 +776,18 @@ class QianfanProvider(BaseProvider, QianfanChatEndpoint):
 class ChatNVIDIAProvider(BaseProvider, ChatNVIDIA):
     id = "nvidia-chat"
     name = "NVIDIA"
-    models = ['playground_llama2_70b',
-        'playground_nemotron_steerlm_8b',
-        'playground_mistral_7b',
-        'playground_nv_llama2_rlhf_70b',
-        'playground_llama2_13b',
-        'playground_steerlm_llama_70b',
-        'playground_llama2_code_13b',
-        'playground_yi_34b',
-        'playground_mixtral_8x7b',
-        'playground_neva_22b',
-        'playground_llama2_code_34b'
-    ]
+    models = [
+        "playground_llama2_70b",
+        "playground_nemotron_steerlm_8b",
+        "playground_mistral_7b",
+        "playground_nv_llama2_rlhf_70b",
+        "playground_llama2_13b",
+        "playground_steerlm_llama_70b",
+        "playground_llama2_code_13b",
+        "playground_yi_34b",
+        "playground_mixtral_8x7b",
+        "playground_neva_22b",
+        "playground_llama2_code_34b",
+    ]
     model_id_key = "model"
     auth_strategy = EnvAuthStrategy(name="NVIDIA_API_KEY")

From 9200eeae87ce18d963f0e11f435838b6bb6d74c6 Mon Sep 17 00:00:00 2001
From: Alex Stephens <146462356+stevie-35@users.noreply.github.com>
Date: Tue, 16 Jan 2024 11:35:23 -0500
Subject: [PATCH 06/15] Update docs/source/users/index.md

Co-authored-by: Jason Weill <93281816+JasonWeill@users.noreply.github.com>
---
 docs/source/users/index.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index d196d2618..6a79e6fe1 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -151,7 +151,7 @@ You need the `pillow` Python package to use Hugging Face Hub's text-to-image mod
 
 You can find a list of Hugging Face's models at [https://huggingface.co/models](https://huggingface.co/models).
 
-To use NVIDIA models, create a free account with the [NVIDIA NGC service](https://catalog.ngc.nvidia.com/), which hosts AI solution catalogs, containers, models, etc. Navigate to Catalog > [AI Foundation Models](https://catalog.ngc.nvidia.com/ai-foundation-models) > (Model with API endpoint). Select the API option and click Generate Key > save as `NVIDIA_API_KEY`.
+To use NVIDIA models, create a free account with the [NVIDIA NGC service](https://catalog.ngc.nvidia.com/), which hosts AI solution catalogs, containers, models, and more. Navigate to Catalog > [AI Foundation Models](https://catalog.ngc.nvidia.com/ai-foundation-models), and select a model with an API endpoint. Click "API" on the model's detail page, and click "Generate Key". Save this key, and set it as the environment variable `NVIDIA_API_KEY`.
 
 SageMaker endpoint names are created when you deploy a model. For more information, see
 ["Create your endpoint and deploy your model"](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html)

From e735a111ec256216416a7ef606d00a004f486528 Mon Sep 17 00:00:00 2001
From: Alex Stephens <146462356+stevie-35@users.noreply.github.com>
Date: Tue, 16 Jan 2024 11:40:36 -0500
Subject: [PATCH 07/15] Update index.md - add note about api key access

---
 docs/source/users/index.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index 6a79e6fe1..6b3318a99 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -151,7 +151,7 @@ You need the `pillow` Python package to use Hugging Face Hub's text-to-image mod
 You can find a list of Hugging Face's models at [https://huggingface.co/models](https://huggingface.co/models).
 
-To use NVIDIA models, create a free account with the [NVIDIA NGC service](https://catalog.ngc.nvidia.com/), which hosts AI solution catalogs, containers, models, and more. Navigate to Catalog > [AI Foundation Models](https://catalog.ngc.nvidia.com/ai-foundation-models), and select a model with an API endpoint. Click "API" on the model's detail page, and click "Generate Key". Save this key, and set it as the environment variable `NVIDIA_API_KEY`.
+To use NVIDIA models, create a free account with the [NVIDIA NGC service](https://catalog.ngc.nvidia.com/), which hosts AI solution catalogs, containers, models, and more. Navigate to Catalog > [AI Foundation Models](https://catalog.ngc.nvidia.com/ai-foundation-models), and select a model with an API endpoint. Click "API" on the model's detail page, and click "Generate Key". Save this key, and set it as the environment variable `NVIDIA_API_KEY` to access any of the model endpoints.
 
 SageMaker endpoint names are created when you deploy a model. For more information, see
For more information, see ["Create your endpoint and deploy your model"](https://docs.aws.amazon.com/sagemaker/latest/dg/realtime-endpoints-deployment.html) From 5f0c2c7f4d9b31f01c1b0b5b84a1145b20d7b2fa Mon Sep 17 00:00:00 2001 From: Alex Stephens <146462356+stevie-35@users.noreply.github.com> Date: Thu, 25 Jan 2024 17:10:07 -0500 Subject: [PATCH 08/15] Remove nvidia embeddings model --- packages/jupyter-ai-magics/pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml index 151543ba9..b84eab58a 100644 --- a/packages/jupyter-ai-magics/pyproject.toml +++ b/packages/jupyter-ai-magics/pyproject.toml @@ -71,7 +71,6 @@ gpt4all = "jupyter_ai_magics:GPT4AllEmbeddingsProvider" huggingface_hub = "jupyter_ai_magics:HfHubEmbeddingsProvider" openai = "jupyter_ai_magics:OpenAIEmbeddingsProvider" qianfan = "jupyter_ai_magics:QianfanEmbeddingsEndpointProvider" -nvidia = "jupyter_ai_magics:NVIDIAEmbeddingsProvider" [tool.hatch.version] source = "nodejs" From d5c8e38e4e5662cbaf2ab394c272e069e24db75d Mon Sep 17 00:00:00 2001 From: Alex Stephens <146462356+stevie-35@users.noreply.github.com> Date: Thu, 25 Jan 2024 17:10:54 -0500 Subject: [PATCH 09/15] Remove nvidia embedding provider --- .../jupyter_ai_magics/embedding_providers.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py index 3f109f72f..ca9fed4b4 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/embedding_providers.py @@ -16,7 +16,6 @@ OpenAIEmbeddings, QianfanEmbeddingsEndpoint, ) -from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings class BaseEmbeddingsProvider(BaseModel): @@ -140,11 +139,3 @@ class QianfanEmbeddingsEndpointProvider( model_id_key = "model" pypi_package_deps = ["qianfan"] auth_strategy = MultiEnvAuthStrategy(names=["QIANFAN_AK", "QIANFAN_SK"]) - - -class NVIDIAEmbeddingsProvider(BaseEmbeddingsProvider, NVIDIAEmbeddings): - id = "nvidia" - name = "NVIDIA" - models = ["playground_nvolveqa_40k"] - model_id_key = "model" - auth_strategy = EnvAuthStrategy(name="NVIDIA_API_KEY") From fdc1dd3170a9e9f107554132468f0fd26450243d Mon Sep 17 00:00:00 2001 From: Alex Stephens <146462356+stevie-35@users.noreply.github.com> Date: Thu, 25 Jan 2024 17:11:44 -0500 Subject: [PATCH 10/15] Remove nvidia embeddings provider --- packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py index 6898332e7..176e30ada 100644 --- a/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py +++ b/packages/jupyter-ai-magics/jupyter_ai_magics/__init__.py @@ -7,7 +7,6 @@ CohereEmbeddingsProvider, GPT4AllEmbeddingsProvider, HfHubEmbeddingsProvider, - NVIDIAEmbeddingsProvider, OpenAIEmbeddingsProvider, QianfanEmbeddingsEndpointProvider, ) From 348ae2e0e45d05e92aab8bf77b64a2f400d2e556 Mon Sep 17 00:00:00 2001 From: Jason Weill Date: Wed, 31 Jan 2024 15:21:39 -0800 Subject: [PATCH 11/15] Mentions conda install instructions in docs --- docs/source/users/index.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/source/users/index.md b/docs/source/users/index.md index 0093e1206..f902b834c 100644 --- a/docs/source/users/index.md +++ 
@@ -81,7 +81,7 @@ $ pip install jupyter_ai_magics
 `jupyter_ai` depends on `jupyter_ai_magics`, so installing `jupyter_ai`
 automatically installs `jupyter_ai_magics`.
 
-### Installation via `pip` within Conda environment (recommended)
+### Installation via `pip` or `conda` in a Conda environment (recommended)
 
 We highly recommend installing both JupyterLab and Jupyter AI within an isolated
 Conda environment to avoid clobbering Python packages in your existing Python
@@ -93,10 +93,16 @@ and create an environment that uses Python 3.11:
 
     $ conda create -n jupyter-ai python=3.11
    $ conda activate jupyter-ai
-    $ pip install jupyter_ai
 
-Then, follow the steps from "Requirements" and "Installation via `pip`" to
-install JupyterLab and Jupyter AI in this Conda environment.
+Then, use either `pip` or `conda` to install JupyterLab and Jupyter AI in this
+Conda environment.
+
+    $ pip install jupyter_ai # or,
+    $ conda install -c conda-forge jupyter-ai # or,
+    $ conda install conda-forge::jupyter-ai
+
+Note that the `pip` package name is `jupyter_ai` with an underscore, and that
+the `conda` package name is `jupyter-ai` with a hyphen.
 
 When starting JupyterLab with Jupyter AI, make sure to activate the Conda
 environment first:

From 8082fa43b4f90b60d776ec4bb6bea8e2a12dd74f Mon Sep 17 00:00:00 2001
From: Jason Weill
Date: Wed, 31 Jan 2024 15:40:34 -0800
Subject: [PATCH 12/15] Uninstallation with conda

---
 docs/source/users/index.md | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index f902b834c..e784b1262 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -114,7 +114,7 @@ jupyter lab
 
 ## Uninstallation
 
-To remove the extension, run:
+If you installed Jupyter AI using `pip`, to remove the extension, run:
 
     $ pip uninstall jupyter_ai
 
 or
 
     $ pip uninstall jupyter_ai_magics
 
+If you installed Jupyter AI using `conda`, you can remove it by running:
+
+    $ conda remove jupyter-ai
+
+or
+
+    $ conda remove jupyter-ai-magics
+
+Note that the `pip` package names use underscores, and the `conda` package
+names use hyphens.
+
 ## Model providers
 
 Jupyter AI supports a wide range of model providers and models. To use Jupyter AI with a particular provider, you must install its Python packages and set its API key (or other credentials) in your environment or in the chat interface.
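Patches 01, 02, and 08 manage the provider wiring through the entry-point tables in pyproject.toml, which is how Jupyter AI discovers providers at runtime. A small, hedged sketch for checking what is registered in an environment with these patches applied; `jupyter_ai.model_providers` is assumed as the name of the chat group (only the embeddings group header is visible in the diffs above), and after patches 08 to 10 the `nvidia` embeddings entry should be absent while `nvidia-chat` remains:

    from importlib.metadata import entry_points

    # Python 3.10+ signature; the group names mirror the
    # [project.entry-points."..."] sections edited in pyproject.toml.
    for group in ("jupyter_ai.model_providers", "jupyter_ai.embeddings_model_providers"):
        print(group)
        for ep in entry_points(group=group):
            print(f"  {ep.name} -> {ep.value}")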
From afea0af0662385f04aa00a5e820f9823044b63b4 Mon Sep 17 00:00:00 2001
From: Jason Weill
Date: Thu, 1 Feb 2024 09:22:03 -0800
Subject: [PATCH 13/15] Makes NVIDIA package dependency optional, updates docs

---
 docs/source/users/index.md                | 2 +-
 packages/jupyter-ai-magics/pyproject.toml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index e784b1262..9016444c6 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -150,7 +150,7 @@ Jupyter AI supports the following model providers:
 | ERNIE-Bot | `qianfan` | `QIANFAN_AK`, `QIANFAN_SK` | `qianfan` |
 | GPT4All | `gpt4all` | N/A | `gpt4all` |
 | Hugging Face Hub | `huggingface_hub` | `HUGGINGFACEHUB_API_TOKEN` | `huggingface_hub`, `ipywidgets`, `pillow` |
-| NVIDIA | `nvidia-chat` | `NVIDIA_API_KEY` | N/A |
+| NVIDIA | `nvidia-chat` | `NVIDIA_API_KEY` | `langchain_nvidia_ai_endpoints` |
 | OpenAI | `openai` | `OPENAI_API_KEY` | `openai` |
 | OpenAI (chat) | `openai-chat` | `OPENAI_API_KEY` | `openai` |
 | SageMaker | `sagemaker-endpoint` | N/A | `boto3` |
diff --git a/packages/jupyter-ai-magics/pyproject.toml b/packages/jupyter-ai-magics/pyproject.toml
index 9fe5c0721..138b79ee3 100644
--- a/packages/jupyter-ai-magics/pyproject.toml
+++ b/packages/jupyter-ai-magics/pyproject.toml
@@ -27,7 +27,6 @@ dependencies = [
     "typing_extensions>=4.5.0",
     "click~=8.0",
     "jsonpath-ng>=1.5.3,<2",
-    "langchain_nvidia_ai_endpoints",
 ]
 
 [project.optional-dependencies]
@@ -42,6 +41,7 @@ all = [
     "gpt4all",
     "huggingface_hub",
     "ipywidgets",
+    "langchain_nvidia_ai_endpoints",
     "pillow",
     "openai~=1.6.1",
     "boto3",

From b7b0f5fe08c9e42e53b9012929ec0e6abe8e8525 Mon Sep 17 00:00:00 2001
From: Jason Weill
Date: Thu, 1 Feb 2024 09:24:58 -0800
Subject: [PATCH 14/15] Revert "Uninstallation with conda"

This reverts commit 8082fa43b4f90b60d776ec4bb6bea8e2a12dd74f.
---
 docs/source/users/index.md | 13 +------------
 1 file changed, 1 insertion(+), 12 deletions(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index 9016444c6..03aa39575 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -114,7 +114,7 @@ jupyter lab
 
 ## Uninstallation
 
-If you installed Jupyter AI using `pip`, to remove the extension, run:
+To remove the extension, run:
 
     $ pip uninstall jupyter_ai
 
 or
 
     $ pip uninstall jupyter_ai_magics
 
-If you installed Jupyter AI using `conda`, you can remove it by running:
-
-    $ conda remove jupyter-ai
-
-or
-
-    $ conda remove jupyter-ai-magics
-
-Note that the `pip` package names use underscores, and the `conda` package
-names use hyphens.
-
 ## Model providers
 
 Jupyter AI supports a wide range of model providers and models. To use Jupyter AI with a particular provider, you must install its Python packages and set its API key (or other credentials) in your environment or in the chat interface.

From 239a26c17d25b1a2b68d152bbbda0d130862013a Mon Sep 17 00:00:00 2001
From: Jason Weill
Date: Thu, 1 Feb 2024 09:25:03 -0800
Subject: [PATCH 15/15] Revert "Mentions conda install instructions in docs"

This reverts commit 348ae2e0e45d05e92aab8bf77b64a2f400d2e556.
---
 docs/source/users/index.md | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index 03aa39575..5a9ca7898 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -81,7 +81,7 @@ $ pip install jupyter_ai_magics
 `jupyter_ai` depends on `jupyter_ai_magics`, so installing `jupyter_ai`
 automatically installs `jupyter_ai_magics`.
 
-### Installation via `pip` or `conda` in a Conda environment (recommended)
+### Installation via `pip` within Conda environment (recommended)
 
 We highly recommend installing both JupyterLab and Jupyter AI within an isolated
 Conda environment to avoid clobbering Python packages in your existing Python
@@ -93,16 +93,10 @@ and create an environment that uses Python 3.11:
 
    $ conda create -n jupyter-ai python=3.11
    $ conda activate jupyter-ai
+    $ pip install jupyter_ai
 
-Then, use either `pip` or `conda` to install JupyterLab and Jupyter AI in this
-Conda environment.
-
-    $ pip install jupyter_ai # or,
-    $ conda install -c conda-forge jupyter-ai # or,
-    $ conda install conda-forge::jupyter-ai
-
-Note that the `pip` package name is `jupyter_ai` with an underscore, and that
-the `conda` package name is `jupyter-ai` with a hyphen.
+Then, follow the steps from "Requirements" and "Installation via `pip`" to
+install JupyterLab and Jupyter AI in this Conda environment.
 
 When starting JupyterLab with Jupyter AI, make sure to activate the Conda
 environment first:
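Taken together, the series leaves one user-visible addition: the `nvidia-chat` provider, with `langchain_nvidia_ai_endpoints` as an optional dependency pulled in by the `all` extra (patch 13). The sketch below is a rough illustration of how the pieces fit, under two assumptions that the patches themselves do not spell out: that the optional package and a valid `NVIDIA_API_KEY` are in place, and that `BaseProvider` translates the `model_id` keyword into the underlying `model` field, as `model_id_key = "model"` suggests.

    import os

    from jupyter_ai_magics import ChatNVIDIAProvider

    # placeholder value; a real key comes from the NGC catalog described in the docs
    os.environ.setdefault("NVIDIA_API_KEY", "nvapi-xxxxxxxx")

    # "playground_mixtral_8x7b" is one of the ids listed on the provider class
    llm = ChatNVIDIAProvider(model_id="playground_mixtral_8x7b")

    # the provider is also a LangChain chat model, so the usual Runnable interface applies
    print(llm.invoke("Say hello from a Jupyter AI provider.").content)

In normal use this instantiation is done by Jupyter AI itself, from the chat settings panel or the `%%ai` magic, so the direct call is only meant to show how the class attributes declared in these patches are consumed.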