From a52c0488c9691a54bbd467020d7f33921f5eff0b Mon Sep 17 00:00:00 2001
From: Saurav Prateek
Date: Wed, 18 Dec 2024 22:44:13 +0530
Subject: [PATCH 1/4] Added DocString for method in chat_models module

---
 .../langchain_google_vertexai/chat_models.py | 23 +++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/libs/vertexai/langchain_google_vertexai/chat_models.py b/libs/vertexai/langchain_google_vertexai/chat_models.py
index ebb9464a..73f470a6 100644
--- a/libs/vertexai/langchain_google_vertexai/chat_models.py
+++ b/libs/vertexai/langchain_google_vertexai/chat_models.py
@@ -442,6 +442,29 @@ def _parse_content(raw_content: str | Dict[Any, Any]) -> Dict[Any, Any]:
 
 
 def _parse_examples(examples: List[BaseMessage]) -> List[InputOutputTextPair]:
+    """Parse the list of messages. The method expects the messages to be in the order of a HumanMessage followed by an AIMessage.
+
+    Args:
+        examples: The list of messages to be parsed.
+    Returns:
+        A parsed example list.
+    Raises:
+        ValueError:
+
+            - If an odd number of examples is given as an argument.
+            - If an instance of HumanMessage is not found at every even index in the input examples list.
+            - If an instance of AIMessage is not found at every odd index in the input examples list.
+
+    A valid list of examples can be as follows:
+    .. code-block:: python
+        examples = [
+            HumanMessage(content="A first sample Human Message"),
+            AIMessage(content="A first sample AI Message"),
+            HumanMessage(content="A second sample Human Message"),
+            AIMessage(content="A second sample AI Message"),
+        ]
+    """
+
     if len(examples) % 2 != 0:
         raise ValueError(
             f"Expect examples to have an even amount of messages, got {len(examples)}."

From 15d0109a5dca2debd1dd2fb90df0c22f814b155e Mon Sep 17 00:00:00 2001
From: Saurav Prateek
Date: Wed, 18 Dec 2024 22:49:41 +0530
Subject: [PATCH 2/4] modified docstring

---
 libs/vertexai/langchain_google_vertexai/chat_models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/vertexai/langchain_google_vertexai/chat_models.py b/libs/vertexai/langchain_google_vertexai/chat_models.py
index 73f470a6..fc4eef78 100644
--- a/libs/vertexai/langchain_google_vertexai/chat_models.py
+++ b/libs/vertexai/langchain_google_vertexai/chat_models.py
@@ -442,10 +442,10 @@ def _parse_content(raw_content: str | Dict[Any, Any]) -> Dict[Any, Any]:
 
 
 def _parse_examples(examples: List[BaseMessage]) -> List[InputOutputTextPair]:
-    """Parse the list of messages. The method expects the messages to be in the order of a HumanMessage followed by an AIMessage.
+    """Parse the list of examples. The method expects the examples to be in the order of a HumanMessage followed by an AIMessage.
 
     Args:
-        examples: The list of messages to be parsed.
+        examples: The list of examples to be parsed.
     Returns:
         A parsed example list.
     Raises:
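For reference, the following is a minimal, self-contained sketch of the alternating-pair contract described by the docstring added above. The helper name `pair_examples` is hypothetical, and the snippet is an editorial illustration rather than the library's `_parse_examples` implementation (which builds Vertex AI `InputOutputTextPair` objects rather than plain tuples):

```python
from typing import List, Tuple

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage


def pair_examples(examples: List[BaseMessage]) -> List[Tuple[str, str]]:
    # Mirror the documented contract: the list must have even length and
    # alternate HumanMessage (even indices) with AIMessage (odd indices).
    if len(examples) % 2 != 0:
        raise ValueError(f"Expected an even number of messages, got {len(examples)}.")
    pairs = []
    for human, ai in zip(examples[0::2], examples[1::2]):
        if not isinstance(human, HumanMessage):
            raise ValueError("Every even-indexed message must be a HumanMessage.")
        if not isinstance(ai, AIMessage):
            raise ValueError("Every odd-indexed message must be an AIMessage.")
        pairs.append((human.content, ai.content))
    return pairs


pair_examples(
    [
        HumanMessage(content="A first sample Human Message"),
        AIMessage(content="A first sample AI Message"),
    ]
)
```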
From d566eb63d655ee1183b6c1692dd4abed2d6f0bab Mon Sep 17 00:00:00 2001
From: Saurav Prateek
Date: Sun, 22 Dec 2024 15:08:41 +0530
Subject: [PATCH 3/4] Added intro for Vertex AI library

---
 .../langchain_google_vertexai/__init__.py    | 106 ++++++++++++++++++
 .../langchain_google_vertexai/chat_models.py |  23 ----
 2 files changed, 106 insertions(+), 23 deletions(-)

diff --git a/libs/vertexai/langchain_google_vertexai/__init__.py b/libs/vertexai/langchain_google_vertexai/__init__.py
index bd057448..31983658 100644
--- a/libs/vertexai/langchain_google_vertexai/__init__.py
+++ b/libs/vertexai/langchain_google_vertexai/__init__.py
@@ -1,3 +1,109 @@
+"""
+## langchain-google-vertexai
+
+This module contains the LangChain integrations for Google Cloud generative models.
+
+## Installation
+
+```bash
+pip install -U langchain-google-vertexai
+```
+
+## Chat Models
+
+The `ChatVertexAI` class exposes models such as `gemini-pro` and `chat-bison`.
+
+To use, you should have a Google Cloud project with APIs enabled and configured credentials. Initialize the model as:
+
+```python
+from langchain_google_vertexai import ChatVertexAI
+
+llm = ChatVertexAI(model_name="gemini-pro")
+llm.invoke("Sing a ballad of LangChain.")
+```
+
+You can use other models, e.g., `chat-bison`:
+
+```python
+from langchain_google_vertexai import ChatVertexAI
+
+llm = ChatVertexAI(model_name="chat-bison", temperature=0.3)
+llm.invoke("Sing a ballad of LangChain.")
+```
+
+#### Multimodal inputs
+
+The Gemini vision model supports image inputs in a single chat message. Example:
+
+```python
+from langchain_core.messages import HumanMessage
+from langchain_google_vertexai import ChatVertexAI
+
+llm = ChatVertexAI(model_name="gemini-pro-vision")
+# A single message can mix text parts and image parts
+message = HumanMessage(
+    content=[
+        {
+            "type": "text",
+            "text": "What's in this image?",
+        },  # You can optionally provide text parts
+        {"type": "image_url", "image_url": {"url": "https://picsum.photos/seed/picsum/200/300"}},
+    ]
+)
+llm.invoke([message])
+```
+
+The value of `image_url` can be any of the following:
+
+- A public image URL
+- An accessible Google Cloud Storage file (e.g., "gs://path/to/file.png")
+- A base64-encoded image (e.g., `data:image/png;base64,abcd124`)
+
+## Embeddings
+
+You can use Google Cloud's embedding models as follows:
+
+```python
+from langchain_google_vertexai import VertexAIEmbeddings
+
+embeddings = VertexAIEmbeddings()
+embeddings.embed_query("hello, world!")
+```
+
+## LLMs
+
+You can use Google Cloud's generative AI models as LangChain LLMs:
+
+```python
+from langchain_core.prompts import PromptTemplate
+from langchain_google_vertexai import ChatVertexAI
+
+template = \"""Question: {question}
+
+Answer: Let's think step by step.\"""
+prompt = PromptTemplate.from_template(template)
+
+llm = ChatVertexAI(model_name="gemini-pro")
+chain = prompt | llm
+
+question = "Who was the president of the USA in 1994?"
+print(chain.invoke({"question": question}))
+```
+
+You can use Gemini and PaLM models, including code-generation ones:
+
+```python
+
+from langchain_google_vertexai import VertexAI
+
+llm = VertexAI(model_name="code-bison", max_output_tokens=1000, temperature=0.3)
+
+question = "Write a Python function that checks if a string is a valid email address"
+
+output = llm.invoke(question)
+```
+"""
+
 from google.cloud.aiplatform_v1beta1.types import (
     FunctionCallingConfig,
     FunctionDeclaration,
diff --git a/libs/vertexai/langchain_google_vertexai/chat_models.py b/libs/vertexai/langchain_google_vertexai/chat_models.py
index fc4eef78..ebb9464a 100644
--- a/libs/vertexai/langchain_google_vertexai/chat_models.py
+++ b/libs/vertexai/langchain_google_vertexai/chat_models.py
@@ -442,29 +442,6 @@ def _parse_content(raw_content: str | Dict[Any, Any]) -> Dict[Any, Any]:
 
 
 def _parse_examples(examples: List[BaseMessage]) -> List[InputOutputTextPair]:
-    """Parse the list of examples. The method expects the examples to be in the order of a HumanMessage followed by an AIMessage.
-
-    Args:
-        examples: The list of examples to be parsed.
-    Returns:
-        A parsed example list.
-    Raises:
-        ValueError:
-
-            - If an odd number of examples is given as an argument.
-            - If an instance of HumanMessage is not found at every even index in the input examples list.
-            - If an instance of AIMessage is not found at every odd index in the input examples list.
-
-    A valid list of examples can be as follows:
-    .. code-block:: python
-        examples = [
-            HumanMessage(content="A first sample Human Message"),
-            AIMessage(content="A first sample AI Message"),
-            HumanMessage(content="A second sample Human Message"),
-            AIMessage(content="A second sample AI Message"),
-        ]
-    """
-
     if len(examples) % 2 != 0:
         raise ValueError(
             f"Expect examples to have an even amount of messages, got {len(examples)}."

From 558ca2c5e0763cc84bfaab6b0ed5a98eebf89888 Mon Sep 17 00:00:00 2001
From: Saurav Prateek
Date: Tue, 24 Dec 2024 12:20:41 +0530
Subject: [PATCH 4/4] Resolved lint issue: Line too long

---
 libs/vertexai/langchain_google_vertexai/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/libs/vertexai/langchain_google_vertexai/__init__.py b/libs/vertexai/langchain_google_vertexai/__init__.py
index 31983658..7c0444f1 100644
--- a/libs/vertexai/langchain_google_vertexai/__init__.py
+++ b/libs/vertexai/langchain_google_vertexai/__init__.py
@@ -13,7 +13,8 @@
 
 The `ChatVertexAI` class exposes models such as `gemini-pro` and `chat-bison`.
 
-To use, you should have a Google Cloud project with APIs enabled and configured credentials. Initialize the model as:
+To use, you should have a Google Cloud project with APIs enabled and configured
+credentials. Initialize the model as:
 
 ```python
 from langchain_google_vertexai import ChatVertexAI
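To complement the multimodal section introduced in PATCH 3/4, here is an illustrative sketch of passing a base64-encoded local image instead of a public URL. The file name `example.png` is hypothetical; the message structure follows the docstring's own example:

```python
import base64

from langchain_core.messages import HumanMessage
from langchain_google_vertexai import ChatVertexAI

llm = ChatVertexAI(model_name="gemini-pro-vision")

# Encode a local file as a data URL, one of the accepted image_url forms.
with open("example.png", "rb") as f:
    encoded = base64.b64encode(f.read()).decode("utf-8")

message = HumanMessage(
    content=[
        {"type": "text", "text": "What's in this image?"},
        {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{encoded}"}},
    ]
)
llm.invoke([message])
```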