community[major]: DeepInfra llm and chat (#5672)
* Init

* fix(type errors)

* feat(deepinfra embeddings)

* fix(default model)

* fix(deepinfra): axios is removed

* ref(deepinfra): remove redundant cast

* format(deepinfra)

* doc(deepinfra)

* doc(deepinfra)

* Update deepinfra.mdx

* Format

* feat(deepinfra): implement llm and chat.

* ref(deepinfra): lint and prettier

* ref(deepinfra): remove console.log

* fix(chatdeepinfra): body

* fix(import map): deepinfra

* fix(gitignore)

* revert(.gitignore)

* revert(.gitignore)

* Adds docs

---------

Co-authored-by: Jacob Lee <[email protected]>
ovuruska and jacoblee93 authored Jun 11, 2024
1 parent e177b2f commit de3e618
Showing 13 changed files with 435 additions and 1 deletion.
2 changes: 1 addition & 1 deletion docs/core_docs/.gitignore
@@ -176,4 +176,4 @@ docs/how_to/assign.mdx
docs/how_to/agent_executor.md
docs/how_to/agent_executor.mdx
docs/integrations/llms/mistral.md
docs/integrations/llms/mistral.mdx
docs/integrations/llms/mistral.mdx
25 changes: 25 additions & 0 deletions docs/core_docs/docs/integrations/chat/deep_infra.mdx
@@ -0,0 +1,25 @@
---
sidebar_label: Deep Infra
---

import CodeBlock from "@theme/CodeBlock";

# ChatDeepInfra

LangChain supports chat models hosted by [Deep Infra](https://deepinfra.com/) through the `ChatDeepInfra` wrapper.
First, you'll need to install the `@langchain/community` package:

import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";

<IntegrationInstallTooltip></IntegrationInstallTooltip>

```bash npm2yarn
npm install @langchain/community
```

You'll need to obtain an API key and set it as an environment variable named `DEEPINFRA_API_TOKEN`
(or pass it into the constructor), then call the model as shown below:

import Example from "@examples/models/chat/integration_deepinfra.ts";

<CodeBlock language="typescript">{Example}</CodeBlock>
25 changes: 25 additions & 0 deletions docs/core_docs/docs/integrations/llms/deep_infra.mdx
@@ -0,0 +1,25 @@
---
sidebar_label: Deep Infra
---

import CodeBlock from "@theme/CodeBlock";

# DeepInfra

LangChain supports LLMs hosted by [Deep Infra](https://deepinfra.com/) through the `DeepInfraLLM` wrapper.
First, you'll need to install the `@langchain/community` package:

import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";

<IntegrationInstallTooltip></IntegrationInstallTooltip>

```bash npm2yarn
npm install @langchain/community
```

You'll need to obtain an API key and set it as an environment variable named `DEEPINFRA_API_TOKEN`
(or pass it into the constructor), then call the model as shown below:

import Example from "@examples/models/llm/deepinfra.ts";

<CodeBlock language="typescript">{Example}</CodeBlock>
17 changes: 17 additions & 0 deletions examples/src/models/chat/integration_deepinfra.ts
@@ -0,0 +1,17 @@
import { ChatDeepInfra } from "@langchain/community/chat_models/deepinfra";
import { HumanMessage } from "@langchain/core/messages";

const apiKey = process.env.DEEPINFRA_API_TOKEN;

const model = "meta-llama/Meta-Llama-3-70B-Instruct";

const chat = new ChatDeepInfra({
  model,
  apiKey,
});

const messages = [new HumanMessage("Hello")];

const res = await chat.invoke(messages);

console.log(res);
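For context, a hedged sketch of how the new chat model composes with standard LangChain streaming (not part of this commit; it assumes `ChatDeepInfra` exposes the usual Runnable `.stream()` method from `@langchain/core`):

```typescript
import { ChatDeepInfra } from "@langchain/community/chat_models/deepinfra";

// Hypothetical usage sketch, not part of this diff: stream a reply chunk
// by chunk. If the integration has no native streaming, the base Runnable
// implementation falls back to yielding the full response as one chunk.
const streamingChat = new ChatDeepInfra({
  model: "meta-llama/Meta-Llama-3-70B-Instruct",
  apiKey: process.env.DEEPINFRA_API_TOKEN,
});

const stream = await streamingChat.stream("Say hello in three languages.");
for await (const chunk of stream) {
  console.log(chunk.content);
}
```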
18 changes: 18 additions & 0 deletions examples/src/models/llm/deepinfra.ts
@@ -0,0 +1,18 @@
import { DeepInfraLLM } from "@langchain/community/llms/deepinfra";

const apiKey = process.env.DEEPINFRA_API_TOKEN;
const model = "meta-llama/Meta-Llama-3-70B-Instruct";

const llm = new DeepInfraLLM({
  temperature: 0.7,
  maxTokens: 20,
  model,
  apiKey,
  maxRetries: 5,
});

const res = await llm.invoke(
  "What is the next step in the process of making a good game?"
);

console.log({ res });
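And a minimal sketch of wiring the new LLM into an LCEL pipeline (also not part of this commit; it assumes only the standard `@langchain/core` prompt and output-parser APIs):

```typescript
import { DeepInfraLLM } from "@langchain/community/llms/deepinfra";
import { PromptTemplate } from "@langchain/core/prompts";
import { StringOutputParser } from "@langchain/core/output_parsers";

// Hypothetical sketch: prompt -> DeepInfra-hosted model -> plain string.
const prompt = PromptTemplate.fromTemplate(
  "List three ideas for a {genre} game."
);

const deepInfraLlm = new DeepInfraLLM({
  model: "meta-llama/Meta-Llama-3-70B-Instruct",
  apiKey: process.env.DEEPINFRA_API_TOKEN,
  maxTokens: 64,
});

const chain = prompt.pipe(deepInfraLlm).pipe(new StringOutputParser());
console.log(await chain.invoke({ genre: "puzzle" }));
```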
8 changes: 8 additions & 0 deletions libs/langchain-community/.gitignore
@@ -234,6 +234,10 @@ llms/cohere.cjs
llms/cohere.js
llms/cohere.d.ts
llms/cohere.d.cts
llms/deepinfra.cjs
llms/deepinfra.js
llms/deepinfra.d.ts
llms/deepinfra.d.cts
llms/fireworks.cjs
llms/fireworks.js
llms/fireworks.d.ts
@@ -510,6 +514,10 @@ chat_models/cloudflare_workersai.cjs
chat_models/cloudflare_workersai.js
chat_models/cloudflare_workersai.d.ts
chat_models/cloudflare_workersai.d.cts
chat_models/deepinfra.cjs
chat_models/deepinfra.js
chat_models/deepinfra.d.ts
chat_models/deepinfra.d.cts
chat_models/fireworks.cjs
chat_models/fireworks.js
chat_models/fireworks.d.ts
2 changes: 2 additions & 0 deletions libs/langchain-community/langchain.config.js
@@ -93,6 +93,7 @@ export const config = {
"llms/bedrock/web": "llms/bedrock/web",
"llms/cloudflare_workersai": "llms/cloudflare_workersai",
"llms/cohere": "llms/cohere",
"llms/deepinfra": "llms/deepinfra",
"llms/fireworks": "llms/fireworks",
"llms/friendli": "llms/friendli",
"llms/googlepalm": "llms/googlepalm",
@@ -164,6 +165,7 @@ export const config = {
"chat_models/bedrock": "chat_models/bedrock/index",
"chat_models/bedrock/web": "chat_models/bedrock/web",
"chat_models/cloudflare_workersai": "chat_models/cloudflare_workersai",
"chat_models/deepinfra": "chat_models/deepinfra",
"chat_models/fireworks": "chat_models/fireworks",
"chat_models/friendli": "chat_models/friendli",
"chat_models/googlevertexai": "chat_models/googlevertexai/index",
26 changes: 26 additions & 0 deletions libs/langchain-community/package.json
@@ -1231,6 +1231,15 @@
"import": "./llms/cohere.js",
"require": "./llms/cohere.cjs"
},
"./llms/deepinfra": {
"types": {
"import": "./llms/deepinfra.d.ts",
"require": "./llms/deepinfra.d.cts",
"default": "./llms/deepinfra.d.ts"
},
"import": "./llms/deepinfra.js",
"require": "./llms/deepinfra.cjs"
},
"./llms/fireworks": {
"types": {
"import": "./llms/fireworks.d.ts",
@@ -1852,6 +1861,15 @@
"import": "./chat_models/cloudflare_workersai.js",
"require": "./chat_models/cloudflare_workersai.cjs"
},
"./chat_models/deepinfra": {
"types": {
"import": "./chat_models/deepinfra.d.ts",
"require": "./chat_models/deepinfra.d.cts",
"default": "./chat_models/deepinfra.d.ts"
},
"import": "./chat_models/deepinfra.js",
"require": "./chat_models/deepinfra.cjs"
},
"./chat_models/fireworks": {
"types": {
"import": "./chat_models/fireworks.d.ts",
@@ -3235,6 +3253,10 @@
"llms/cohere.js",
"llms/cohere.d.ts",
"llms/cohere.d.cts",
"llms/deepinfra.cjs",
"llms/deepinfra.js",
"llms/deepinfra.d.ts",
"llms/deepinfra.d.cts",
"llms/fireworks.cjs",
"llms/fireworks.js",
"llms/fireworks.d.ts",
@@ -3511,6 +3533,10 @@
"chat_models/cloudflare_workersai.js",
"chat_models/cloudflare_workersai.d.ts",
"chat_models/cloudflare_workersai.d.cts",
"chat_models/deepinfra.cjs",
"chat_models/deepinfra.js",
"chat_models/deepinfra.d.ts",
"chat_models/deepinfra.d.cts",
"chat_models/fireworks.cjs",
"chat_models/fireworks.js",
"chat_models/fireworks.d.ts",
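The paired `exports` entries above let both module systems resolve the new subpaths: the `import` condition maps to the `.js` ESM builds and `.d.ts` types, while `require` maps to the `.cjs` builds and `.d.cts` types. A small consumer-side sketch (hypothetical, assuming the published package, not part of this diff):

```typescript
// Both deep imports resolve through the "exports" map shown above;
// CommonJS consumers would instead require(...) and receive the .cjs builds.
import { DeepInfraLLM } from "@langchain/community/llms/deepinfra";
import { ChatDeepInfra } from "@langchain/community/chat_models/deepinfra";

console.log(typeof DeepInfraLLM, typeof ChatDeepInfra); // "function" "function"
```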