
docs: langchain-openai (#15513)
Updates docs and cookbooks to import ChatOpenAI, OpenAI, and OpenAIEmbeddings from `langchain_openai`.

There are likely more

---------

Co-authored-by: Harrison Chase <[email protected]>
efriis and hwchase17 authored Jan 6, 2024
1 parent be612f4 commit b1fa726
Showing 414 changed files with 808 additions and 834 deletions.
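
The migration pattern applied throughout is sketched below; both import paths and the constructor calls are taken from the hunks that follow, and the snippet assumes the langchain-openai package is installed and an OpenAI API key is configured.

    # Before: provider classes imported from langchain / langchain_community
    # from langchain_community.chat_models import ChatOpenAI
    # from langchain_community.embeddings import OpenAIEmbeddings
    # from langchain_community.llms import OpenAI

    # After: the same classes come from the langchain_openai partner package
    # (assumes `langchain-openai` is installed and OPENAI_API_KEY is set)
    from langchain_openai import ChatOpenAI, OpenAI, OpenAIEmbeddings

    llm = OpenAI(temperature=0)
    chat = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
    embeddings = OpenAIEmbeddings()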
4 changes: 2 additions & 2 deletions cookbook/LLaMA2_sql_chat.ipynb
@@ -149,7 +149,7 @@
 ],
 "source": [
 "# Prompt\n",
-"from langchain.prompts import ChatPromptTemplate\n",
+"from langchain_core.prompts import ChatPromptTemplate\n",
 "\n",
 "# Update the template based on the type of SQL Database like MySQL, Microsoft SQL Server and so on\n",
 "template = \"\"\"Based on the table schema below, write a SQL query that would answer the user's question:\n",
@@ -278,7 +278,7 @@
 "source": [
 "# Prompt\n",
 "from langchain.memory import ConversationBufferMemory\n",
-"from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
+"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
 "\n",
 "template = \"\"\"Given an input question, convert it to a SQL query. No pre-amble. Based on the table schema below, write a SQL query that would answer the user's question:\n",
 "{schema}\n",
6 changes: 3 additions & 3 deletions cookbook/Multi_modal_RAG.ipynb
@@ -198,9 +198,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.prompts import ChatPromptTemplate\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_core.prompts import ChatPromptTemplate\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "\n",
 "# Generate summaries of text elements\n",
@@ -355,9 +355,9 @@
 "\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores import Chroma\n",
 "from langchain_core.documents import Document\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "\n",
 "def create_multi_vector_retriever(\n",
8 changes: 4 additions & 4 deletions cookbook/Semi_Structured_RAG.ipynb
@@ -235,9 +235,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.prompts import ChatPromptTemplate\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
-"from langchain_core.output_parsers import StrOutputParser"
+"from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_core.prompts import ChatPromptTemplate\n",
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {
@@ -320,9 +320,9 @@
 "\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores import Chroma\n",
 "from langchain_core.documents import Document\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "# The vectorstore to use to index the child chunks\n",
 "vectorstore = Chroma(collection_name=\"summaries\", embedding_function=OpenAIEmbeddings())\n",
8 changes: 4 additions & 4 deletions cookbook/Semi_structured_and_multi_modal_RAG.ipynb
@@ -211,9 +211,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.prompts import ChatPromptTemplate\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
-"from langchain_core.output_parsers import StrOutputParser"
+"from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_core.prompts import ChatPromptTemplate\n",
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {
@@ -375,9 +375,9 @@
 "\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores import Chroma\n",
 "from langchain_core.documents import Document\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "# The vectorstore to use to index the child chunks\n",
 "vectorstore = Chroma(collection_name=\"summaries\", embedding_function=OpenAIEmbeddings())\n",
4 changes: 2 additions & 2 deletions cookbook/Semi_structured_multi_modal_RAG_LLaMA2.ipynb
@@ -209,9 +209,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.prompts import ChatPromptTemplate\n",
 "from langchain_community.chat_models import ChatOllama\n",
-"from langchain_core.output_parsers import StrOutputParser"
+"from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_core.prompts import ChatPromptTemplate"
 ]
 },
 {
6 changes: 3 additions & 3 deletions cookbook/advanced_rag_eval.ipynb
@@ -132,8 +132,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores import Chroma\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "baseline = Chroma.from_texts(\n",
 " texts=all_splits_pypdf_texts,\n",
@@ -160,9 +160,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain.prompts import ChatPromptTemplate\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_core.prompts import ChatPromptTemplate\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "# Prompt\n",
 "prompt_text = \"\"\"You are an assistant tasked with summarizing tables and text for retrieval. \\\n",
5 changes: 2 additions & 3 deletions cookbook/agent_vectorstore.ipynb
@@ -29,9 +29,8 @@
 "source": [
 "from langchain.chains import RetrievalQA\n",
 "from langchain.text_splitter import CharacterTextSplitter\n",
-"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
-"from langchain_community.llms import OpenAI\n",
 "from langchain_community.vectorstores import Chroma\n",
+"from langchain_openai import OpenAI, OpenAIEmbeddings\n",
 "\n",
 "llm = OpenAI(temperature=0)"
 ]
@@ -161,7 +160,7 @@
 "source": [
 "# Import things that are needed generically\n",
 "from langchain.agents import AgentType, Tool, initialize_agent\n",
-"from langchain_community.llms import OpenAI"
+"from langchain_openai import OpenAI"
 ]
 },
 {
2 changes: 1 addition & 1 deletion cookbook/analyze_document.ipynb
@@ -29,7 +29,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains import AnalyzeDocumentChain\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(model=\"gpt-3.5-turbo\", temperature=0)"
 ]
8 changes: 4 additions & 4 deletions cookbook/autogpt/autogpt.ipynb
@@ -62,8 +62,8 @@
 "outputs": [],
 "source": [
 "from langchain.docstore import InMemoryDocstore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.vectorstores import FAISS"
+"from langchain_community.vectorstores import FAISS\n",
+"from langchain_openai import OpenAIEmbeddings"
 ]
 },
 {
@@ -100,8 +100,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from langchain_community.chat_models import ChatOpenAI\n",
-"from langchain_experimental.autonomous_agents import AutoGPT"
+"from langchain_experimental.autonomous_agents import AutoGPT\n",
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {
4 changes: 2 additions & 2 deletions cookbook/autogpt/marathon_times.ipynb
@@ -41,8 +41,8 @@
 "import pandas as pd\n",
 "from langchain.docstore.document import Document\n",
 "from langchain_community.agent_toolkits.pandas.base import create_pandas_dataframe_agent\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
 "from langchain_experimental.autonomous_agents import AutoGPT\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "# Needed synce jupyter runs an async eventloop\n",
 "nest_asyncio.apply()"
@@ -311,8 +311,8 @@
 "# Memory\n",
 "import faiss\n",
 "from langchain.docstore import InMemoryDocstore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores import FAISS\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "embeddings_model = OpenAIEmbeddings()\n",
 "embedding_size = 1536\n",
5 changes: 2 additions & 3 deletions cookbook/baby_agi.ipynb
@@ -31,9 +31,8 @@
 "source": [
 "from typing import Optional\n",
 "\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.llms import OpenAI\n",
-"from langchain_experimental.autonomous_agents import BabyAGI"
+"from langchain_experimental.autonomous_agents import BabyAGI\n",
+"from langchain_openai import OpenAI, OpenAIEmbeddings"
 ]
 },
 {
7 changes: 3 additions & 4 deletions cookbook/baby_agi_with_agent.ipynb
@@ -29,9 +29,8 @@
 "\n",
 "from langchain.chains import LLMChain\n",
 "from langchain.prompts import PromptTemplate\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.llms import OpenAI\n",
-"from langchain_experimental.autonomous_agents import BabyAGI"
+"from langchain_experimental.autonomous_agents import BabyAGI\n",
+"from langchain_openai import OpenAI, OpenAIEmbeddings"
 ]
 },
 {
@@ -108,8 +107,8 @@
 "source": [
 "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n",
 "from langchain.chains import LLMChain\n",
-"from langchain_community.llms import OpenAI\n",
 "from langchain_community.utilities import SerpAPIWrapper\n",
+"from langchain_openai import OpenAI\n",
 "\n",
 "todo_prompt = PromptTemplate.from_template(\n",
 " \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n",
2 changes: 1 addition & 1 deletion cookbook/camel_role_playing.ipynb
@@ -46,7 +46,7 @@
 " HumanMessage,\n",
 " SystemMessage,\n",
 ")\n",
-"from langchain_community.chat_models import ChatOpenAI"
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {
2 changes: 1 addition & 1 deletion cookbook/causal_program_aided_language_model.ipynb
@@ -47,9 +47,9 @@
 "outputs": [],
 "source": [
 "from IPython.display import SVG\n",
-"from langchain_community.llms import OpenAI\n",
 "from langchain_experimental.cpal.base import CPALChain\n",
 "from langchain_experimental.pal_chain import PALChain\n",
+"from langchain_openai import OpenAI\n",
 "\n",
 "llm = OpenAI(temperature=0, max_tokens=512)\n",
 "cpal_chain = CPALChain.from_univariate_prompt(llm=llm, verbose=True)\n",
4 changes: 2 additions & 2 deletions cookbook/code-analysis-deeplake.ipynb
@@ -657,7 +657,7 @@
 }
 ],
 "source": [
-"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "embeddings = OpenAIEmbeddings()\n",
 "embeddings"
@@ -834,7 +834,7 @@
 "outputs": [],
 "source": [
 "from langchain.chains import ConversationalRetrievalChain\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "model = ChatOpenAI(\n",
 " model_name=\"gpt-3.5-turbo-0613\"\n",
8 changes: 4 additions & 4 deletions cookbook/custom_agent_with_plugin_retrieval.ipynb
@@ -44,8 +44,8 @@
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
 "from langchain_community.agent_toolkits import NLAToolkit\n",
-"from langchain_community.llms import OpenAI\n",
-"from langchain_community.tools.plugin import AIPlugin"
+"from langchain_community.tools.plugin import AIPlugin\n",
+"from langchain_openai import OpenAI"
 ]
 },
 {
@@ -115,8 +115,8 @@
 "outputs": [],
 "source": [
 "from langchain.schema import Document\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.vectorstores import FAISS"
+"from langchain_community.vectorstores import FAISS\n",
+"from langchain_openai import OpenAIEmbeddings"
 ]
 },
 {
@@ -69,8 +69,8 @@
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
 "from langchain_community.agent_toolkits import NLAToolkit\n",
-"from langchain_community.llms import OpenAI\n",
-"from langchain_community.tools.plugin import AIPlugin"
+"from langchain_community.tools.plugin import AIPlugin\n",
+"from langchain_openai import OpenAI"
 ]
 },
 {
@@ -139,8 +139,8 @@
 "outputs": [],
 "source": [
 "from langchain.schema import Document\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.vectorstores import FAISS"
+"from langchain_community.vectorstores import FAISS\n",
+"from langchain_openai import OpenAIEmbeddings"
 ]
 },
 {
8 changes: 4 additions & 4 deletions cookbook/custom_agent_with_tool_retrieval.ipynb
@@ -41,8 +41,8 @@
 "from langchain.chains import LLMChain\n",
 "from langchain.prompts import StringPromptTemplate\n",
 "from langchain.schema import AgentAction, AgentFinish\n",
-"from langchain_community.llms import OpenAI\n",
-"from langchain_community.utilities import SerpAPIWrapper"
+"from langchain_community.utilities import SerpAPIWrapper\n",
+"from langchain_openai import OpenAI"
 ]
 },
 {
@@ -104,8 +104,8 @@
 "outputs": [],
 "source": [
 "from langchain.schema import Document\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
-"from langchain_community.vectorstores import FAISS"
+"from langchain_community.vectorstores import FAISS\n",
+"from langchain_openai import OpenAIEmbeddings"
 ]
 },
 {
2 changes: 1 addition & 1 deletion cookbook/databricks_sql_db.ipynb
@@ -93,7 +93,7 @@
 "outputs": [],
 "source": [
 "# Creating a OpenAI Chat LLM wrapper\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
+"from langchain_openai import ChatOpenAI\n",
 "\n",
 "llm = ChatOpenAI(temperature=0, model_name=\"gpt-4\")"
 ]
3 changes: 1 addition & 2 deletions cookbook/deeplake_semantic_search_over_chat.ipynb
@@ -56,9 +56,8 @@
 " CharacterTextSplitter,\n",
 " RecursiveCharacterTextSplitter,\n",
 ")\n",
-"from langchain_community.embeddings.openai import OpenAIEmbeddings\n",
-"from langchain_community.llms import OpenAI\n",
 "from langchain_community.vectorstores import DeepLake\n",
+"from langchain_openai import OpenAI, OpenAIEmbeddings\n",
 "\n",
 "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n",
 "activeloop_token = getpass.getpass(\"Activeloop Token:\")\n",
6 changes: 3 additions & 3 deletions cookbook/docugami_xml_kg_rag.ipynb
@@ -475,8 +475,8 @@
 " HumanMessagePromptTemplate,\n",
 " SystemMessagePromptTemplate,\n",
 ")\n",
-"from langchain_community.chat_models import ChatOpenAI\n",
-"from langchain_core.output_parsers import StrOutputParser"
+"from langchain_core.output_parsers import StrOutputParser\n",
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {
@@ -547,9 +547,9 @@
 "\n",
 "from langchain.retrievers.multi_vector import MultiVectorRetriever\n",
 "from langchain.storage import InMemoryStore\n",
-"from langchain_community.embeddings import OpenAIEmbeddings\n",
 "from langchain_community.vectorstores.chroma import Chroma\n",
 "from langchain_core.documents import Document\n",
+"from langchain_openai import OpenAIEmbeddings\n",
 "\n",
 "\n",
 "def build_retriever(text_elements, tables, table_summaries):\n",
2 changes: 1 addition & 1 deletion cookbook/elasticsearch_db_qa.ipynb
@@ -39,7 +39,7 @@
 "source": [
 "from elasticsearch import Elasticsearch\n",
 "from langchain.chains.elasticsearch_database import ElasticsearchDatabaseChain\n",
-"from langchain_community.chat_models import ChatOpenAI"
+"from langchain_openai import ChatOpenAI"
 ]
 },
 {