From b06e25914dbb91e2679c9e1335c056f473f3b248 Mon Sep 17 00:00:00 2001 From: 3coins <3coins@users.noreply.github.com> Date: Tue, 24 Oct 2023 20:10:19 +0000 Subject: [PATCH 1/2] Publish 2.4.0 SHA256 hashes: jupyter-ai-core-2.4.0.tgz: 04773e2b888853cd1c27785ac3c8434226e9a279a2fd253962cb20e5e9f72c1d jupyter_ai-2.4.0-py3-none-any.whl: a5880cc108a107c746935d7eaa2513dffa29d2812e6628fd22a972a97aba4e2a jupyter_ai-2.4.0.tar.gz: 0d065b18f4985fb726010e76d9c6059932e21327ea2951ccaa18b6e7b5189240 jupyter_ai_magics-2.4.0-py3-none-any.whl: 585bd960ac5c254e28ea165db840276883155a0a720720aa850e3272edc2001e jupyter_ai_magics-2.4.0.tar.gz: 2cdfb1e084aad46cdbbfb4eed64b4e7abc96ad7fde31da2ddb6899225dfa0684 --- CHANGELOG.md | 26 +++++++++++++++++++++++-- lerna.json | 2 +- package.json | 2 +- packages/jupyter-ai-magics/package.json | 2 +- packages/jupyter-ai/package.json | 2 +- 5 files changed, 28 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 89e5822d8..0e6907299 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,30 @@ +## 2.4.0 + +([Full Changelog](https://github.com/jupyterlab/jupyter-ai/compare/@jupyter-ai/core@2.3.0...92dab10608ea090b6bd87ca5ffe9ccff7a15b449)) + +Hey Jupyternauts! We're excited to announce the 2.4.0 release of Jupyter AI, which includes better support for Bedrock Anthropic models. Thanks to [@krassowski](https://github.com/krassowski) for providing a new feature in Jupyter AI that let's admins specify allowlists and blocklists to filter the list of providers available in the chat settings panel. 
+ +### Enhancements made + +- Allow to define block and allow lists for providers [#415](https://github.com/jupyterlab/jupyter-ai/pull/415) ([@krassowski](https://github.com/krassowski)) + +### Bugs fixed + +- Refactor generate for better stability with all providers/models [#407](https://github.com/jupyterlab/jupyter-ai/pull/407) ([@3coins](https://github.com/3coins)) + +### Maintenance and upkeep improvements + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterlab/jupyter-ai/graphs/contributors?from=2023-10-09&to=2023-10-24&type=c)) + +[@3coins](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3A3coins+updated%3A2023-10-09..2023-10-24&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3Akrassowski+updated%3A2023-10-09..2023-10-24&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3Apre-commit-ci+updated%3A2023-10-09..2023-10-24&type=Issues) + + + ## 2.3.0 ([Full Changelog](https://github.com/jupyterlab/jupyter-ai/compare/@jupyter-ai/core@2.2.0...7f854007263f1a9393e41611028d7cc57313c577)) @@ -44,8 +68,6 @@ Updating to 2.3.0 shouldn't require any changes on your end. 
However, if you not [@3coins](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3A3coins+updated%3A2023-09-05..2023-10-09&type=Issues) | [@andrii-i](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3Aandrii-i+updated%3A2023-09-05..2023-10-09&type=Issues) | [@dlqqq](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3Adlqqq+updated%3A2023-09-05..2023-10-09&type=Issues) | [@JasonWeill](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3AJasonWeill+updated%3A2023-09-05..2023-10-09&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyterlab%2Fjupyter-ai+involves%3Akrassowski+updated%3A2023-09-05..2023-10-09&type=Issues) - - ## 2.2.0 ([Full Changelog](https://github.com/jupyterlab/jupyter-ai/compare/@jupyter-ai/core@2.1.0...56c1f518afd09d0d09a43221f0767aa961e9430f)) diff --git a/lerna.json b/lerna.json index 626a3f09f..2c270143d 100644 --- a/lerna.json +++ b/lerna.json @@ -1,7 +1,7 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", "useWorkspaces": true, - "version": "2.3.0", + "version": "2.4.0", "npmClient": "yarn", "useNx": true } diff --git a/package.json b/package.json index d09859efc..4db946244 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@jupyter-ai/monorepo", - "version": "2.3.0", + "version": "2.4.0", "description": "A generative AI extension for JupyterLab", "private": true, "keywords": [ diff --git a/packages/jupyter-ai-magics/package.json b/packages/jupyter-ai-magics/package.json index a18796241..c9e06f3ab 100644 --- a/packages/jupyter-ai-magics/package.json +++ b/packages/jupyter-ai-magics/package.json @@ -1,6 +1,6 @@ { "name": "@jupyter-ai/magics", - "version": "2.3.0", + "version": "2.4.0", "description": "Jupyter AI magics Python package. 
Not published on NPM.", "private": true, "homepage": "https://github.com/jupyterlab/jupyter-ai", diff --git a/packages/jupyter-ai/package.json b/packages/jupyter-ai/package.json index eb8b55fa0..9727d67c8 100644 --- a/packages/jupyter-ai/package.json +++ b/packages/jupyter-ai/package.json @@ -1,6 +1,6 @@ { "name": "@jupyter-ai/core", - "version": "2.3.0", + "version": "2.4.0", "description": "A generative AI extension for JupyterLab", "keywords": [ "jupyter", From 7e4a2a5115ea691afb68522ec267d879e8b3fc95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Krassowski?= <5832902+krassowski@users.noreply.github.com> Date: Mon, 30 Oct 2023 23:40:59 +0000 Subject: [PATCH 2/2] Document how to add custom model providers (#420) * Document how to add custom model providers * Apply suggestions from review Co-authored-by: Jason Weill <93281816+JasonWeill@users.noreply.github.com> --------- Co-authored-by: Jason Weill <93281816+JasonWeill@users.noreply.github.com> --- docs/source/users/index.md | 92 ++++++++++++++++++- .../jupyter_ai_magics/utils.py | 10 +- packages/jupyter-ai/jupyter_ai/handlers.py | 2 +- 3 files changed, 98 insertions(+), 6 deletions(-) diff --git a/docs/source/users/index.md b/docs/source/users/index.md index 72b826a67..b519f6ea4 100644 --- a/docs/source/users/index.md +++ b/docs/source/users/index.md @@ -159,6 +159,96 @@ responsible for all charges they incur when they make API requests. Review your provider's pricing information before submitting requests via Jupyter AI. ::: +### Custom model providers + +You can define new providers using the LangChain framework API. Custom providers +inherit from both `jupyter-ai`'s ``BaseProvider`` and `langchain`'s [``LLM``][LLM]. +You can either import a pre-defined model from [LangChain LLM list][langchain_llms], +or define a [custom LLM][custom_llm]. +In the example below, we define a provider with two models using +a dummy ``FakeListLLM`` model, which returns responses from the ``responses`` +keyword argument. 
+ +```python +# my_package/my_provider.py +from jupyter_ai_magics import BaseProvider +from langchain.llms import FakeListLLM + + +class MyProvider(BaseProvider, FakeListLLM): + id = "my_provider" + name = "My Provider" + model_id_key = "model" + models = [ + "model_a", + "model_b" + ] + def __init__(self, **kwargs): + model = kwargs.get("model_id") + kwargs["responses"] = ( + ["This is a response from model 'a'"] + if model == "model_a" else + ["This is a response from model 'b'"] + ) + super().__init__(**kwargs) +``` + + +If the new provider inherits from [``BaseChatModel``][BaseChatModel], it will be available +both in the chat UI and with magic commands. Otherwise, users can only use the new provider +with magic commands. + +To make the new provider available, you need to declare it as an [entry point](https://setuptools.pypa.io/en/latest/userguide/entry_point.html): + +```toml +# my_package/pyproject.toml +[project] +name = "my_package" +version = "0.0.1" + +[project.entry-points."jupyter_ai.model_providers"] +my-provider = "my_provider:MyProvider" +``` + +To test that the above minimal provider package works, install it with: + +```sh +# from `my_package` directory +pip install -e . +``` + +Then, restart JupyterLab. You should now see an info message in the log that mentions +your new provider's `id`: + +``` +[I 2023-10-29 13:56:16.915 AiExtension] Registered model provider `my_provider`. 
+```
+
+[langchain_llms]: https://api.python.langchain.com/en/latest/api_reference.html#module-langchain.llms
+[custom_llm]: https://python.langchain.com/docs/modules/model_io/models/llms/custom_llm
+[LLM]: https://api.python.langchain.com/en/latest/llms/langchain.llms.base.LLM.html#langchain.llms.base.LLM
+[BaseChatModel]: https://api.python.langchain.com/en/latest/chat_models/langchain.chat_models.base.BaseChatModel.html
+
+
+### Customizing prompt templates
+
+To modify the prompt template for a given format, override the ``get_prompt_template`` method:
+
+```python
+from langchain.prompts import PromptTemplate
+
+
+class MyProvider(BaseProvider, FakeListLLM):
+    # (... properties as above ...)
+    def get_prompt_template(self, format) -> PromptTemplate:
+        if format == "code":
+            return PromptTemplate.from_template(
+                "{prompt}\n\nProduce output as source code only, "
+                "with no text or explanation before or after it."
+            )
+        return super().get_prompt_template(format)
+```
+
 ## The chat interface
 
 The easiest way to get started with Jupyter AI is to use the chat interface.
@@ -689,7 +779,7 @@ Write a poem about C++.
 
 You can also define a custom LangChain chain:
 
-```
+```python
 from langchain.chains import LLMChain
 from langchain.prompts import PromptTemplate
 from langchain.llms import OpenAI
diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/utils.py b/packages/jupyter-ai-magics/jupyter_ai_magics/utils.py
index c651581bc..0441d707c 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/utils.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/utils.py
@@ -30,9 +30,10 @@ def get_lm_providers(
     for model_provider_ep in model_provider_eps:
         try:
             provider = model_provider_ep.load()
-        except:
+        except Exception as e:
             log.error(
-                f"Unable to load model provider class from entry point `{model_provider_ep.name}`." 
+ f"Unable to load model provider class from entry point `{model_provider_ep.name}`: %s.", + e, ) continue if not is_provider_allowed(provider.id, restrictions): @@ -58,9 +59,10 @@ def get_em_providers( for model_provider_ep in model_provider_eps: try: provider = model_provider_ep.load() - except: + except Exception as e: log.error( - f"Unable to load embeddings model provider class from entry point `{model_provider_ep.name}`." + f"Unable to load embeddings model provider class from entry point `{model_provider_ep.name}`: %s.", + e, ) continue if not is_provider_allowed(provider.id, restrictions): diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index ddc4c6255..23b96ad7a 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -182,7 +182,7 @@ def broadcast_message(self, message: Message): self.chat_history.append(message) async def on_message(self, message): - self.log.debug("Message recieved: %s", message) + self.log.debug("Message received: %s", message) try: message = json.loads(message)