From e50d15b52666c5c928ba7aa82347ddb1671a7e7f Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Mon, 16 Dec 2024 14:13:45 -0800 Subject: [PATCH] x --- .../integrations/text_embedding/localai.ipynb | 53 +++++++++---------- 1 file changed, 25 insertions(+), 28 deletions(-) diff --git a/docs/docs/integrations/text_embedding/localai.ipynb b/docs/docs/integrations/text_embedding/localai.ipynb index 79542a61a2af5..263a3b346f56c 100644 --- a/docs/docs/integrations/text_embedding/localai.ipynb +++ b/docs/docs/integrations/text_embedding/localai.ipynb @@ -8,22 +8,40 @@ "source": [ "# LocalAI\n", "\n", + ":::info\n", + "\n", + "`langchain-localai` is a third-party integration package for LocalAI. It provides a simple way to use LocalAI services in LangChain.\n", + "\n", + "The source code is available on [GitHub](https://github.com/mkhludnev/langchain-localai).\n", + "\n", + ":::\n", + "\n", "Let's load the LocalAI Embedding class. In order to use the LocalAI Embedding class, you need to have the LocalAI service hosted somewhere and configure the embedding models. See the documentation at https://localai.io/basics/getting_started/index.html and https://localai.io/features/embeddings/index.html." 
] }, { "cell_type": "code", - "execution_count": 1, - "id": "0be1af71", + "execution_count": null, + "id": "799d1f77", "metadata": {}, "outputs": [], "source": [ - "from langchain_community.embeddings import LocalAIEmbeddings" + "%pip install -U langchain-localai" ] }, { "cell_type": "code", "execution_count": 2, + "id": "0be1af71", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_localai import LocalAIEmbeddings" + ] + }, + { + "cell_type": "code", + "execution_count": 3, "id": "2c66e5da", "metadata": {}, "outputs": [], @@ -35,7 +53,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "id": "01370375", "metadata": {}, "outputs": [], @@ -45,7 +63,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "bfb6142c", "metadata": {}, "outputs": [], @@ -136,27 +154,11 @@ "# if you are behind an explicit proxy, you can use the OPENAI_PROXY environment variable to pass through\n", "os.environ[\"OPENAI_PROXY\"] = \"http://proxy.yourcompany.com:8080\"" ] - }, - { - "cell_type": "markdown", - "id": "4a824a90", - "metadata": {}, - "source": [ - "## Limitations \n", - "\n", - "`langchain_community.embeddings.LocalAIEmbeddings` has two issues: \n", - " - it depends on Open AI SDK v0, which is outdated\n", - " - and it requests document embeddings one by one in `embed_documents` without bulking them into the single request. 
\n", - "\n", - " `langchain-localai` is the separate integration package provided for resolving these issues: \n", - " - the distribution: https://pypi.org/project/langchain-localai/ \n", - " - the source code: https://github.com/mkhludnev/langchain-localai" - ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": ".venv", "language": "python", "name": "python3" }, @@ -170,12 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" - }, - "vscode": { - "interpreter": { - "hash": "e971737741ff4ec9aff7dc6155a1060a59a8a6d52c757dbbe66bf8ee389494b1" - } + "version": "3.11.4" } }, "nbformat": 4,