Merge branch 'master' into harrison/improve-integration-docs
ccurme authored Dec 3, 2024
2 parents ff214eb + ab831ce commit fcbca18
Showing 38 changed files with 2,673 additions and 1,205 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/_compile_integration_test.yml
@@ -13,14 +13,15 @@ on:
description: "Python version to use"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
defaults:
run:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
+ timeout-minutes: 20
name: "poetry run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v4
2 changes: 1 addition & 1 deletion .github/workflows/_integration_test.yml
@@ -12,7 +12,7 @@ on:
description: "Python version to use"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
3 changes: 2 additions & 1 deletion .github/workflows/_lint.yml
@@ -13,7 +13,7 @@ on:
description: "Python version to use"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"
WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }}

# This env var allows us to get inline annotations when ruff has complaints.
@@ -23,6 +23,7 @@ jobs:
build:
name: "make lint #${{ inputs.python-version }}"
runs-on: ubuntu-latest
+ timeout-minutes: 20
steps:
- uses: actions/checkout@v4

3 changes: 2 additions & 1 deletion .github/workflows/_release.yml
@@ -21,7 +21,7 @@ on:

env:
PYTHON_VERSION: "3.11"
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
@@ -167,6 +167,7 @@ jobs:
- release-notes
- test-pypi-publish
runs-on: ubuntu-latest
+ timeout-minutes: 20
steps:
- uses: actions/checkout@v4

3 changes: 2 additions & 1 deletion .github/workflows/_test.yml
@@ -13,14 +13,15 @@ on:
description: "Python version to use"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
defaults:
run:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
+ timeout-minutes: 20
name: "make test #${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v4
3 changes: 2 additions & 1 deletion .github/workflows/_test_doc_imports.yml
@@ -9,11 +9,12 @@ on:
description: "Python version to use"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
runs-on: ubuntu-latest
+ timeout-minutes: 20
name: "check doc imports #${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v4
3 changes: 2 additions & 1 deletion .github/workflows/_test_pydantic.yml
@@ -18,14 +18,15 @@ on:
description: "Pydantic version to test."

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
defaults:
run:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
+ timeout-minutes: 20
name: "make test # pydantic: ~=${{ inputs.pydantic-version }}, python: ${{ inputs.python-version }}, "
steps:
- uses: actions/checkout@v4
2 changes: 1 addition & 1 deletion .github/workflows/_test_release.yml
@@ -14,7 +14,7 @@ on:
description: "Release from a non-master branch (danger!)"

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"
PYTHON_VERSION: "3.10"

jobs:
2 changes: 1 addition & 1 deletion .github/workflows/api_doc_build.yml
@@ -5,7 +5,7 @@ on:
schedule:
- cron: '0 13 * * *'
env:
- POETRY_VERSION: "1.8.1"
+ POETRY_VERSION: "1.8.4"
PYTHON_VERSION: "3.11"

jobs:
3 changes: 2 additions & 1 deletion .github/workflows/check_diffs.yml
@@ -17,7 +17,7 @@ concurrency:
cancel-in-progress: true

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
@@ -119,6 +119,7 @@ jobs:
job-configs: ${{ fromJson(needs.build.outputs.extended-tests) }}
fail-fast: false
runs-on: ubuntu-latest
+ timeout-minutes: 20
defaults:
run:
working-directory: ${{ matrix.job-configs.working-directory }}
2 changes: 1 addition & 1 deletion .github/workflows/run_notebooks.yml
@@ -15,7 +15,7 @@ on:
- cron: '0 13 * * *'

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"

jobs:
build:
64 changes: 51 additions & 13 deletions .github/workflows/scheduled_test.yml
@@ -2,32 +2,70 @@ name: Scheduled tests

on:
workflow_dispatch: # Allows to trigger the workflow manually in GitHub UI
+ inputs:
+ working-directory-force:
+ type: string
+ description: "From which folder this pipeline executes - defaults to all in matrix - example value: libs/partners/anthropic"
+ python-version-force:
+ type: string
+ description: "Python version to use - defaults to 3.9 and 3.11 in matrix - example value: 3.9"
schedule:
- cron: '0 13 * * *'

env:
- POETRY_VERSION: "1.7.1"
+ POETRY_VERSION: "1.8.4"
+ DEFAULT_LIBS: >
+ [
+ "libs/partners/openai",
+ "libs/partners/anthropic",
+ "libs/partners/fireworks",
+ "libs/partners/groq",
+ "libs/partners/mistralai",
+ "libs/partners/google-vertexai",
+ "libs/partners/google-genai",
+ "libs/partners/aws"
+ ]
jobs:
+ compute-matrix:
+ if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
+ runs-on: ubuntu-latest
+ name: Compute matrix
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Set matrix
+ id: set-matrix
+ env:
+ DEFAULT_LIBS: ${{ env.DEFAULT_LIBS }}
+ WORKING_DIRECTORY_FORCE: ${{ github.event.inputs.working-directory-force || '' }}
+ PYTHON_VERSION_FORCE: ${{ github.event.inputs.python-version-force || '' }}
+ run: |
+ # echo "matrix=..." where matrix is a json formatted str with keys python-version and working-directory
+ # python-version should default to 3.9 and 3.11, but is overridden to [PYTHON_VERSION_FORCE] if set
+ # working-directory should default to DEFAULT_LIBS, but is overridden to [WORKING_DIRECTORY_FORCE] if set
+ python_version='["3.9", "3.11"]'
+ working_directory="$DEFAULT_LIBS"
+ if [ -n "$PYTHON_VERSION_FORCE" ]; then
+ python_version="[\"$PYTHON_VERSION_FORCE\"]"
+ fi
+ if [ -n "$WORKING_DIRECTORY_FORCE" ]; then
+ working_directory="[\"$WORKING_DIRECTORY_FORCE\"]"
+ fi
+ matrix="{\"python-version\": $python_version, \"working-directory\": $working_directory}"
+ echo $matrix
+ echo "matrix=$matrix" >> $GITHUB_OUTPUT
build:
if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
name: Python ${{ matrix.python-version }} - ${{ matrix.working-directory }}
runs-on: ubuntu-latest
+ needs: [compute-matrix]
+ timeout-minutes: 20
strategy:
fail-fast: false
matrix:
- python-version:
- - "3.9"
- - "3.11"
- working-directory:
- - "libs/partners/openai"
- - "libs/partners/anthropic"
- - "libs/partners/fireworks"
- - "libs/partners/groq"
- - "libs/partners/mistralai"
- - "libs/partners/google-vertexai"
- - "libs/partners/google-genai"
- - "libs/partners/aws"
+ python-version: ${{ fromJSON(needs.compute-matrix.outputs.matrix).python-version }}
+ working-directory: ${{ fromJSON(needs.compute-matrix.outputs.matrix).working-directory }}

steps:
- uses: actions/checkout@v4
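The new compute-matrix job assembles the job matrix in the shell step above and hands it to the build job via fromJSON. For readers untangling the quoting, here is a rough Python rendering of the same logic; it is illustrative only and not part of the workflow (the abbreviated default_libs stands in for DEFAULT_LIBS):

```python
import json
import os

# Stand-in for the DEFAULT_LIBS env var (abbreviated here for brevity).
default_libs = ["libs/partners/openai", "libs/partners/anthropic"]

python_version_force = os.environ.get("PYTHON_VERSION_FORCE", "")
working_directory_force = os.environ.get("WORKING_DIRECTORY_FORCE", "")

# Default to both Python versions and all libraries; a "force" input
# narrows the corresponding axis to a single entry.
python_versions = [python_version_force] if python_version_force else ["3.9", "3.11"]
working_directories = [working_directory_force] if working_directory_force else default_libs

matrix = {"python-version": python_versions, "working-directory": working_directories}
print(json.dumps(matrix))
# -> {"python-version": ["3.9", "3.11"], "working-directory": ["libs/partners/openai", ...]}
```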
2 changes: 1 addition & 1 deletion docs/docs/integrations/chat/mistralai.ipynb
@@ -39,7 +39,7 @@
"### Credentials\n",
"\n",
"\n",
- "A valid [API key](https://console.mistral.ai/users/api-keys/) is needed to communicate with the API. Once you've done this set the MISTRAL_API_KEY environment variable:"
+ "A valid [API key](https://console.mistral.ai/api-keys/) is needed to communicate with the API. Once you've done this set the MISTRAL_API_KEY environment variable:"
]
},
{
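The corrected link points at the current Mistral console. The credential setup the cell describes looks roughly like the sketch below; the model name is an assumption for illustration, not something this diff specifies:

```python
import getpass
import os

# Prompt for the key only if it isn't already set, then export it as MISTRAL_API_KEY.
if not os.environ.get("MISTRAL_API_KEY"):
    os.environ["MISTRAL_API_KEY"] = getpass.getpass("Enter your Mistral API key: ")

from langchain_mistralai import ChatMistralAI

llm = ChatMistralAI(model="mistral-large-latest")  # illustrative model name
print(llm.invoke("Hello!").content)
```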
2 changes: 1 addition & 1 deletion docs/docs/integrations/providers/pinecone.mdx
@@ -32,7 +32,7 @@ For a more detailed walkthrough of the Pinecone vectorstore, see [this notebook]
### Pinecone Hybrid Search

```bash
- pip install pinecone-client pinecone-text
+ pip install pinecone pinecone-text
```

```python
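With the install line switched from pinecone-client to pinecone, one common hybrid-search setup uses langchain_community's PineconeHybridSearchRetriever roughly as sketched below; the index name, API-key handling, and embedding model are assumptions, not taken from this diff:

```python
import os

from pinecone import Pinecone
from pinecone_text.sparse import BM25Encoder
from langchain_community.retrievers import PineconeHybridSearchRetriever
from langchain_openai import OpenAIEmbeddings

# Connect to an existing index; "hybrid-demo" is a placeholder name.
pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
index = pc.Index("hybrid-demo")

embeddings = OpenAIEmbeddings()            # dense vectors
sparse_encoder = BM25Encoder().default()   # sparse vectors from pretrained BM25 weights

retriever = PineconeHybridSearchRetriever(
    embeddings=embeddings, sparse_encoder=sparse_encoder, index=index
)
docs = retriever.invoke("example query")
```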
@@ -24,7 +24,7 @@
"metadata": {},
"outputs": [],
"source": [
- "%pip install --upgrade --quiet pinecone-client pinecone-text pinecone-notebooks"
+ "%pip install --upgrade --quiet pinecone pinecone-text pinecone-notebooks"
]
},
{
2 changes: 1 addition & 1 deletion docs/docs/tutorials/retrievers.ipynb
@@ -245,7 +245,7 @@
"\n",
"import EmbeddingTabs from \"@theme/EmbeddingTabs\";\n",
"\n",
- "<EmbeddingTabs customVarName=\"embeddings_model\" />"
+ "<EmbeddingTabs customVarName=\"embeddings\" />"
]
},
{
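The customVarName change only renames the variable emitted by the rendered embedding tab: readers now see the selected model bound to embeddings instead of embeddings_model, roughly as below (provider and model are illustrative):

```python
from langchain_openai import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
```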
14 changes: 14 additions & 0 deletions docs/src/theme/ChatModelTabs.js
@@ -15,6 +15,7 @@ import CodeBlock from "@theme-original/CodeBlock";
* @property {string} [googleParams] - Parameters for Google chat model. Defaults to `model="gemini-pro"`
* @property {string} [togetherParams] - Parameters for Together chat model. Defaults to `model="mistralai/Mixtral-8x7B-Instruct-v0.1"`
* @property {string} [nvidiaParams] - Parameters for Nvidia NIM model. Defaults to `model="meta/llama3-70b-instruct"`
+ * @property {string} [databricksParams] - Parameters for Databricks model. Defaults to `endpoint="databricks-meta-llama-3-1-70b-instruct"`
* @property {string} [awsBedrockParams] - Parameters for AWS Bedrock chat model.
* @property {boolean} [hideOpenai] - Whether or not to hide OpenAI chat model.
* @property {boolean} [hideAnthropic] - Whether or not to hide Anthropic chat model.
@@ -27,6 +28,7 @@ import CodeBlock from "@theme-original/CodeBlock";
* @property {boolean} [hideAzure] - Whether or not to hide Microsoft Azure OpenAI chat model.
* @property {boolean} [hideNvidia] - Whether or not to hide NVIDIA NIM model.
* @property {boolean} [hideAWS] - Whether or not to hide AWS models.
+ * @property {boolean} [hideDatabricks] - Whether or not to hide Databricks models.
* @property {string} [customVarName] - Custom variable name for the model. Defaults to `model`.
*/

@@ -46,6 +48,7 @@ export default function ChatModelTabs(props) {
azureParams,
nvidiaParams,
awsBedrockParams,
+ databricksParams,
hideOpenai,
hideAnthropic,
hideCohere,
@@ -57,6 +60,7 @@
hideAzure,
hideNvidia,
hideAWS,
+ hideDatabricks,
customVarName,
} = props;

@@ -79,6 +83,7 @@
`\n azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],\n azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],\n openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],\n`;
const nvidiaParamsOrDefault = nvidiaParams ?? `model="meta/llama3-70b-instruct"`
const awsBedrockParamsOrDefault = awsBedrockParams ?? `model="anthropic.claude-3-5-sonnet-20240620-v1:0",\n beta_use_converse_api=True`;
+ const databricksParamsOrDefault = databricksParams ?? `endpoint="databricks-meta-llama-3-1-70b-instruct"`

const llmVarName = customVarName ?? "model";

@@ -182,6 +187,15 @@
default: false,
shouldHide: hideTogether,
},
+ {
+ value: "Databricks",
+ label: "Databricks",
+ text: `from databricks_langchain import ChatDatabricks\n\nos.environ["DATABRICKS_HOST"] = "https://example.staging.cloud.databricks.com/serving-endpoints"\n\n${llmVarName} = ChatDatabricks(${databricksParamsOrDefault})`,
+ apiKeyName: "DATABRICKS_TOKEN",
+ packageName: "databricks-langchain",
+ default: false,
+ shouldHide: hideDatabricks,
+ },
];

return (
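Rendered, the new Databricks tab produces code equivalent to the following sketch; the host URL is the placeholder baked into the tab definition and the endpoint is the databricksParams default, with token handling added here for completeness:

```python
import getpass
import os

from databricks_langchain import ChatDatabricks

# Placeholder workspace URL, mirroring the tab's example value.
os.environ["DATABRICKS_HOST"] = "https://example.staging.cloud.databricks.com/serving-endpoints"
if not os.environ.get("DATABRICKS_TOKEN"):
    os.environ["DATABRICKS_TOKEN"] = getpass.getpass("Enter your Databricks token: ")

model = ChatDatabricks(endpoint="databricks-meta-llama-3-1-70b-instruct")
```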
7 changes: 7 additions & 0 deletions libs/community/langchain_community/adapters/openai.py
@@ -91,6 +91,8 @@ def convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
additional_kwargs["function_call"] = dict(function_call)
if tool_calls := _dict.get("tool_calls"):
additional_kwargs["tool_calls"] = tool_calls
+ if context := _dict.get("context"):
+ additional_kwargs["context"] = context
return AIMessage(content=content, additional_kwargs=additional_kwargs)
elif role == "system":
return SystemMessage(content=_dict.get("content", ""))
@@ -135,6 +137,11 @@ def convert_message_to_dict(message: BaseMessage) -> dict:
# If tool calls only, content is None not empty string
if message_dict["content"] == "":
message_dict["content"] = None
+ if "context" in message.additional_kwargs:
+ message_dict["context"] = message.additional_kwargs["context"]
+ # If context only, content is None not empty string
+ if message_dict["content"] == "":
+ message_dict["content"] = None
elif isinstance(message, SystemMessage):
message_dict = {"role": "system", "content": message.content}
elif isinstance(message, FunctionMessage):
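These additions let a context field carried by some OpenAI-compatible responses survive a round trip through the adapter. A minimal sketch using the two functions touched here (the payload shape is illustrative):

```python
from langchain_community.adapters.openai import (
    convert_dict_to_message,
    convert_message_to_dict,
)

# An assistant payload carrying a "context" field.
raw = {
    "role": "assistant",
    "content": "The answer is 42.",
    "context": {"citations": [{"title": "source.txt"}]},
}

msg = convert_dict_to_message(raw)
assert msg.additional_kwargs["context"] == raw["context"]

round_tripped = convert_message_to_dict(msg)
assert round_tripped["context"] == raw["context"]
```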
4 changes: 2 additions & 2 deletions libs/community/langchain_community/vectorstores/pinecone.py
@@ -31,7 +31,7 @@ def _import_pinecone() -> Any:
except ImportError as e:
raise ImportError(
"Could not import pinecone python package. "
- "Please install it with `pip install pinecone-client`."
+ "Please install it with `pip3 install pinecone`."
) from e
return pinecone

@@ -48,7 +48,7 @@ def _is_pinecone_v3() -> bool:
class Pinecone(VectorStore):
"""`Pinecone` vector store.
- To use, you should have the ``pinecone-client`` python package installed.
+ To use, you should have the ``pinecone`` python package installed.
This version of Pinecone is deprecated. Please use `langchain_pinecone.Pinecone`
instead.
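Since the docstring flags the community class as deprecated in favor of langchain_pinecone, migration looks roughly like the sketch below; the index name and embedding model are assumptions:

```python
import os

from pinecone import Pinecone
from langchain_openai import OpenAIEmbeddings
from langchain_pinecone import PineconeVectorStore  # successor to the community Pinecone class

pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
index = pc.Index("langchain-demo")  # placeholder index name

vector_store = PineconeVectorStore(index=index, embedding=OpenAIEmbeddings())
vector_store.add_texts(["LangChain integrates with Pinecone."])
results = vector_store.similarity_search("Pinecone integration", k=1)
```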