Skip to content

Commit

Permalink
Merge branch 'main' into brace/tools-return-tool-message
Browse files Browse the repository at this point in the history
  • Loading branch information
bracesproul authored Jul 11, 2024
2 parents eca01c1 + 2eb699d commit 204a4f5
Show file tree
Hide file tree
Showing 122 changed files with 2,710 additions and 637 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/standard-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ jobs:
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
DISABLE_CONSOLE_LOGS: "true"

# The `@langchain/openai` package contains standard tests for ChatOpenAI and AzureChatOpenAI
# We want to run these separately, so we need to pass the exact path for each test, which means
Expand All @@ -50,6 +51,7 @@ jobs:
run: yarn workspace @langchain/openai test:single src/tests/chat_models_structured_output.int.test.ts src/tests/chat_models-extended.int.test.ts src/tests/chat_models-vision.int.test.ts src/tests/chat_models.int.test.ts src/tests/chat_models.standard.int.test.ts
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
DISABLE_CONSOLE_LOGS: "true"

standard-tests-azure-openai:
runs-on: ubuntu-latest
Expand All @@ -71,6 +73,7 @@ jobs:
AZURE_OPENAI_API_DEPLOYMENT_NAME: "chat"
AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_BASE_PATH: ${{ secrets.AZURE_OPENAI_BASE_PATH }}
DISABLE_CONSOLE_LOGS: "true"

standard-tests-bedrock:
runs-on: ubuntu-latest
Expand All @@ -90,4 +93,5 @@ jobs:
env:
BEDROCK_AWS_REGION: "us-east-1"
BEDROCK_AWS_SECRET_ACCESS_KEY: ${{ secrets.BEDROCK_AWS_SECRET_ACCESS_KEY }}
BEDROCK_AWS_ACCESS_KEY_ID: ${{ secrets.BEDROCK_AWS_ACCESS_KEY_ID }}
BEDROCK_AWS_ACCESS_KEY_ID: ${{ secrets.BEDROCK_AWS_ACCESS_KEY_ID }}
DISABLE_CONSOLE_LOGS: "true"
9 changes: 6 additions & 3 deletions .github/workflows/unit-tests-integrations.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@ on:
paths:
- 'langchain-core/**'
- 'libs/**/**'
workflow_dispatch: # Allows triggering the workflow manually in GitHub UI

workflow_dispatch: # Allows triggering the workflow manually in GitHub UI

# If another push to the same PR or branch happens while this workflow is still running,
# cancel the earlier run in favor of the next run.
Expand Down Expand Up @@ -48,6 +47,7 @@ jobs:
PACKAGES: "anthropic,azure-openai,cloudflare,cohere,core,community,exa,google-common,google-gauth,google-genai,google-vertexai,google-vertexai-web,google-webauth,groq,mistralai,mongo,nomic,openai,pinecone,qdrant,redis,textsplitters,weaviate,yandex,baidu-qianfan"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
matrix_length: ${{ steps.set-matrix.outputs.matrix_length }}
steps:
- id: set-matrix
run: |
Expand All @@ -71,14 +71,17 @@ jobs:
fi
done
matrix="$matrix]}"
matrix_length=$(echo $matrix | jq '.include | length')
echo "Matrix: $matrix"
echo "Matrix length: $matrix_length"
echo "matrix_length=$matrix_length" >> $GITHUB_OUTPUT
echo "matrix=$matrix" >> $GITHUB_OUTPUT
unit-tests:
name: Unit Tests
needs: prepare-matrix
# Only run this job if there are packages to test
if: ${{ fromJson(needs.prepare-matrix.outputs.matrix).include.length != 0 }}
if: needs.prepare-matrix.outputs.matrix_length > 0
runs-on: ubuntu-latest
strategy:
matrix: ${{ fromJson(needs.prepare-matrix.outputs.matrix) }}
Expand Down
16 changes: 8 additions & 8 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,16 +37,15 @@ To make creating packages like this easier, we offer the [`create-langchain-inte
$ npx create-langchain-integration
```

After creating the new integration package, you should add it to the [`unit-tests-integrations.yml`](./.github/workflows/unit-tests-integrations.yml) GitHub action workflow so that it is tested in CI. To do this, simply add the integration name inside the `jobs.unit-tests.strategy.matrix.package` array:
After creating the new integration package, you should add it to the [`unit-tests-integrations.yml`](./.github/workflows/unit-tests-integrations.yml) GitHub action workflow so that it is tested in CI. To do this, simply update the `env` section of the `prepare-matrix` job with your package name inside the `PACKAGES` variable:

```yaml
jobs:
unit-tests:
name: Unit Tests
strategy:
matrix:
package: [anthropic, azure-openai, cloudflare, <your package name>]
...
prepare-matrix:
needs: get-changed-files
runs-on: ubuntu-latest
env:
PACKAGES: "anthropic,azure-openai,cloudflare,<your-package>"
...
```

### Want to add a feature that's already in Python?
Expand Down Expand Up @@ -159,6 +158,7 @@ cd libs/langchain-community
```

### Setup

**Prerequisite**: Node version 18+ is required. Please check node version `node -v` and update it if required.

To get started, you will need to install the dependencies for the project. To do so, run:
Expand Down
2 changes: 1 addition & 1 deletion docs/core_docs/docs/concepts.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,7 @@ An essential component of a conversation is being able to refer to information i
At bare minimum, a conversational system should be able to access some window of past messages directly.

The concept of `ChatHistory` refers to a class in LangChain which can be used to wrap an arbitrary chain.
This `ChatHistory` will keep track of inputs and outputs of the underlying chain, and append them as messages to a message database
This `ChatHistory` will keep track of inputs and outputs of the underlying chain, and append them as messages to a message database.
Future interactions will then load those messages and pass them into the chain as part of the input.

### Document
Expand Down
13 changes: 8 additions & 5 deletions docs/core_docs/docs/integrations/chat/cohere.mdx
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
---
sidebar_label: Cohere
sidebar_class_name: beta
---

import CodeBlock from "@theme/CodeBlock";

# ChatCohere

:::info
The Cohere Chat API is still in beta. This means Cohere may make breaking changes at any time.
:::

## Setup

In order to use the LangChain.js Cohere integration you'll need an API key.
Expand Down Expand Up @@ -48,6 +43,14 @@ import ChatStreamExample from "@examples/models/chat/cohere/chat_stream_cohere.t
You can see a LangSmith trace of this example [here](https://smith.langchain.com/public/36ae0564-b096-4ec1-9318-1f82fe705fe8/r)
:::

### Tools

The Cohere API supports tool calling, along with multi-hop tool calling. The following example demonstrates how to call tools:

import ToolCallingExample from "@examples/models/chat/cohere/tool_calling.ts";

<CodeBlock language="typescript">{ToolCallingExample}</CodeBlock>

### Stateful conversation API

Cohere's chat API supports stateful conversations.
Expand Down
2 changes: 1 addition & 1 deletion docs/core_docs/docs/integrations/chat/index.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ The table shows, for each integration, which features have been implemented with
| ChatAnthropic ||||||
| ChatBaiduWenxin ||||||
| ChatCloudflareWorkersAI ||||||
| ChatCohere ||| | ||
| ChatCohere ||| | ||
| ChatFireworks ||||||
| ChatGoogleGenerativeAI ||||||
| ChatVertexAI ||||||
Expand Down
2 changes: 1 addition & 1 deletion docs/core_docs/docs/integrations/vectorstores/redis.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ import IntegrationInstallTooltip from "@mdx_components/integration_install_toolt
<IntegrationInstallTooltip></IntegrationInstallTooltip>

```bash npm2yarn
npm install @langchain/openai @langchain/community
npm install @langchain/openai @langchain/core @langchain/redis langchain
```

## Index docs
Expand Down
2 changes: 1 addition & 1 deletion docs/core_docs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@
},
"devDependencies": {
"@babel/eslint-parser": "^7.18.2",
"@langchain/langgraph": "latest",
"@langchain/langgraph": "0.0.26",
"@langchain/scripts": "workspace:*",
"@swc/core": "^1.3.62",
"@types/cookie": "^0",
Expand Down
2 changes: 1 addition & 1 deletion examples/src/document_compressors/cohere_rerank.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ const docs = [

const cohereRerank = new CohereRerank({
apiKey: process.env.COHERE_API_KEY, // Default
model: "rerank-english-v2.0", // Default
model: "rerank-english-v2.0",
});

const rerankedDocuments = await cohereRerank.rerank(docs, query, {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ const docs = [
const cohereRerank = new CohereRerank({
apiKey: process.env.COHERE_API_KEY, // Default
topN: 3, // Default
model: "rerank-english-v2.0", // Default
model: "rerank-english-v2.0",
});

const rerankedDocuments = await cohereRerank.compressDocuments(docs, query);
Expand Down
2 changes: 1 addition & 1 deletion examples/src/embeddings/cohere.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { CohereEmbeddings } from "@langchain/cohere";

export const run = async () => {
const model = new CohereEmbeddings();
const model = new CohereEmbeddings({ model: "embed-english-v3.0" });
const res = await model.embedQuery(
"What would be a good company name a company that makes colorful socks?"
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import { MemoryVectorStore } from "langchain/vectorstores/memory";
const model = new ChatAnthropic();
const vectorstore = await MemoryVectorStore.fromDocuments(
[{ pageContent: "mitochondria is the powerhouse of the cell", metadata: {} }],
new CohereEmbeddings()
new CohereEmbeddings({ model: "embed-english-v3.0" })
);
const retriever = vectorstore.asRetriever();
const template = `Answer the question based only on the following context:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ const collection = client.db(dbName).collection(collectionName);
const vectorstore = await MongoDBAtlasVectorSearch.fromTexts(
["Hello world", "Bye bye", "What's this?"],
[{ id: 2 }, { id: 1 }, { id: 3 }],
new CohereEmbeddings(),
new CohereEmbeddings({ model: "embed-english-v3.0" }),
{
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
Expand Down
15 changes: 9 additions & 6 deletions examples/src/indexes/vector_stores/mongodb_atlas_search.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,15 @@ const namespace = "langchain.test";
const [dbName, collectionName] = namespace.split(".");
const collection = client.db(dbName).collection(collectionName);

const vectorStore = new MongoDBAtlasVectorSearch(new CohereEmbeddings(), {
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
});
const vectorStore = new MongoDBAtlasVectorSearch(
new CohereEmbeddings({ model: "embed-english-v3.0" }),
{
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
}
);

const resultOne = await vectorStore.similaritySearch("Hello world", 1);
console.log(resultOne);
Expand Down
15 changes: 9 additions & 6 deletions examples/src/indexes/vector_stores/mongodb_metadata_filtering.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,15 @@ const namespace = "langchain.test";
const [dbName, collectionName] = namespace.split(".");
const collection = client.db(dbName).collection(collectionName);

const vectorStore = new MongoDBAtlasVectorSearch(new CohereEmbeddings(), {
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
});
const vectorStore = new MongoDBAtlasVectorSearch(
new CohereEmbeddings({ model: "embed-english-v3.0" }),
{
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
}
);

await vectorStore.addDocuments([
{
Expand Down
15 changes: 9 additions & 6 deletions examples/src/indexes/vector_stores/mongodb_mmr.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,15 @@ const namespace = "langchain.test";
const [dbName, collectionName] = namespace.split(".");
const collection = client.db(dbName).collection(collectionName);

const vectorStore = new MongoDBAtlasVectorSearch(new CohereEmbeddings(), {
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
});
const vectorStore = new MongoDBAtlasVectorSearch(
new CohereEmbeddings({ model: "embed-english-v3.0" }),
{
collection,
indexName: "default", // The name of the Atlas search index. Defaults to "default"
textKey: "text", // The name of the collection field containing the raw content. Defaults to "text"
embeddingKey: "embedding", // The name of the collection field containing the embedded text. Defaults to "embedding"
}
);

const resultOne = await vectorStore.maxMarginalRelevanceSearch("Hello world", {
k: 4,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ const config = {
};

const vercelPostgresStore = await VercelPostgres.initialize(
new CohereEmbeddings(),
new CohereEmbeddings({ model: "embed-english-v3.0" }),
config
);

Expand Down
45 changes: 23 additions & 22 deletions examples/src/models/chat/cohere/chat_cohere.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import { ChatPromptTemplate } from "@langchain/core/prompts";

const model = new ChatCohere({
apiKey: process.env.COHERE_API_KEY, // Default
model: "command", // Default
});
const prompt = ChatPromptTemplate.fromMessages([
["ai", "You are a helpful assistant"],
Expand All @@ -14,26 +13,28 @@ const response = await chain.invoke({
input: "Hello there friend!",
});
console.log("response", response);
/**
response AIMessage {
lc_serializable: true,
lc_namespace: [ 'langchain_core', 'messages' ],
content: "Hi there! I'm not your friend, but I'm happy to help you in whatever way I can today. How are you doing? Is there anything I can assist you with? I am an AI chatbot capable of generating thorough responses, and I'm designed to have helpful, inclusive conversations with users. \n" +
'\n' +
"If you have any questions, feel free to ask away, and I'll do my best to provide you with helpful responses. \n" +
'\n' +
'Would you like me to help you with anything in particular right now?',
additional_kwargs: {
response_id: 'c6baa057-ef94-4bb0-9c25-3a424963a074',
generationId: 'd824fcdc-b922-4ae6-8d45-7b65a21cdd6a',
token_count: {
prompt_tokens: 66,
response_tokens: 104,
total_tokens: 170,
billed_tokens: 159
/*
response AIMessage {
content: 'Hello there! How can I help you today?',
name: undefined,
additional_kwargs: {
response_id: '51ff9e7e-7419-43db-a8e6-17db54805695',
generationId: 'f9b507f5-5296-40c5-834c-b1c09e24a0f6',
chatHistory: [ [Object], [Object], [Object] ],
finishReason: 'COMPLETE',
meta: { apiVersion: [Object], billedUnits: [Object], tokens: [Object] }
},
meta: { api_version: [Object], billed_units: [Object] },
tool_inputs: null
response_metadata: {
estimatedTokenUsage: { completionTokens: 10, promptTokens: 78, totalTokens: 88 },
response_id: '51ff9e7e-7419-43db-a8e6-17db54805695',
generationId: 'f9b507f5-5296-40c5-834c-b1c09e24a0f6',
chatHistory: [ [Object], [Object], [Object] ],
finishReason: 'COMPLETE',
meta: { apiVersion: [Object], billedUnits: [Object], tokens: [Object] }
},
id: undefined,
tool_calls: [],
invalid_tool_calls: [],
usage_metadata: { input_tokens: 78, output_tokens: 10, total_tokens: 88 }
}
}
*/
*/
26 changes: 13 additions & 13 deletions examples/src/models/chat/cohere/chat_stream_cohere.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import { StringOutputParser } from "@langchain/core/output_parsers";

const model = new ChatCohere({
apiKey: process.env.COHERE_API_KEY, // Default
model: "command", // Default
});
const prompt = ChatPromptTemplate.fromMessages([
["ai", "You are a helpful assistant"],
Expand All @@ -23,15 +22,16 @@ for await (const item of response) {
}
console.log("stream tokens:", streamTokens);
console.log("stream iters:", streamIters);
/**
stream item:
stream item: Hello! I'm here to help answer any questions you
stream item: might have or assist you with any task you'd like to
stream item: accomplish. I can provide information
stream item: on a wide range of topics
stream item: , from math and science to history and literature. I can
stream item: also help you manage your schedule, set reminders, and
stream item: much more. Is there something specific you need help with? Let
stream item: me know!
stream item:
*/

/*
stream item:
stream item: Hello! I'm here to help answer any questions you
stream item: might have or assist you with any task you'd like to
stream item: accomplish. I can provide information
stream item: on a wide range of topics
stream item: , from math and science to history and literature. I can
stream item: also help you manage your schedule, set reminders, and
stream item: much more. Is there something specific you need help with? Let
stream item: me know!
stream item:
*/
Loading

0 comments on commit 204a4f5

Please sign in to comment.