Skip to content

Commit

Permalink
Add ollama model option #255
Browse files Browse the repository at this point in the history
- fix docker compose file (user can change ollama container port)
- fix readme docs(add --env-file option)
- add usable model
  • Loading branch information
sihyeong671 committed Aug 18, 2024
1 parent 2dadca1 commit 0d2d77a
Show file tree
Hide file tree
Showing 6 changed files with 23 additions and 8 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ We offer two options. Choose the one that best suits your needs:
2. Run `./backend/docker/docker-compose-full.yml`.

```bash
docker-compose -f ./backend/docker/docker-compose-full.yml up -d
docker-compose -f ./backend/docker/docker-compose-full.yml --env-file ./backend/.env.development up -d
```

3. Run the Frontend application:
Expand Down Expand Up @@ -88,7 +88,7 @@ We offer two options. Choose the one that best suits your needs:
2. Run `./backend/docker/docker-compose.yml`.

```bash
docker-compose -f ./backend/docker/docker-compose.yml up -d
docker-compose -f ./backend/docker/docker-compose.yml --env-file ./backend/.env.development up -d
```

3. Run the Backend application:
Expand Down
8 changes: 6 additions & 2 deletions backend/.env.development
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@ DATABASE_URL=mongodb://localhost:27017/codepair

# GITHUB_CLIENT_ID: Client ID for authenticating with GitHub.
# To obtain a client ID, create an OAuth app at: https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app
GITHUB_CLIENT_ID=""
GITHUB_CLIENT_ID=your_github_client_id_here
# GITHUB_CLIENT_SECRET: Client secret for authenticating with GitHub.
# To obtain a client ID, create an OAuth app at: https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app
GITHUB_CLIENT_SECRET=""
GITHUB_CLIENT_SECRET=your_github_client_secret_here
# GITHUB_CALLBACK_URL: Callback URL for handling GitHub authentication response.
# Format: https://<backend_url>/auth/login/github
# Example: http://localhost:3000/auth/login/github (For development mode)
Expand Down Expand Up @@ -41,6 +41,10 @@ YORKIE_PROJECT_SECRET_KEY=""
# You can find available LLM models at https://ollama.com/library
# If set to false, OPENAI_API_KEY is not required.
YORKIE_INTELLIGENCE="gemma2:2b"

# OLLAMA_HOST_PORT: host port mapped to the yorkie-intelligence (Ollama) container
OLLAMA_HOST_PORT=11434

# OPENAI_API_KEY: API key for using the gpt-3.5-turbo model by Yorkie Intelligence.
# To obtain an API key, visit OpenAI: https://help.openai.com/en/articles/4936850-where-do-i-find-my-api-key
OPENAI_API_KEY=your_openai_api_key_here
Expand Down
2 changes: 1 addition & 1 deletion backend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ This project is the backend part of the CodePair service developed using NestJS.
3. Run `./backend/docker/docker-compose.yml`.

```bash
docker-compose -f ./backend/docker/docker-compose.yml up -d
docker-compose -f ./backend/docker/docker-compose.yml --env-file ./backend/.env.development up -d
```

4. Run the Backend application:
Expand Down
2 changes: 1 addition & 1 deletion backend/docker/docker-compose-full.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ services:
image: "ollama/ollama:latest"
restart: always
ports:
- "11434:11434"
- ${OLLAMA_HOST_PORT}:11434

mongo:
build:
Expand Down
2 changes: 1 addition & 1 deletion backend/docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ services:
image: "ollama/ollama:latest"
restart: always
ports:
- "11434:11434"
- ${OLLAMA_HOST_PORT}:11434

mongo:
build:
Expand Down
13 changes: 12 additions & 1 deletion backend/src/langchain/langchain.module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,21 @@ import { BaseChatModel } from "@langchain/core/language_models/chat_models";
const chatModelFactory = {
provide: "ChatModel",
useFactory: () => {
let modelList: string[] = [
"lamma3.1",
"gemma2",
"gemma2:2b",
"phi3",
"mistral",
"neural-chat",
"starling-lm",
"solar",
];
const modelType = process.env.YORKIE_INTELLIGENCE;
if (modelType === "gemma2:2b") {
if (modelType in modelList) {
return new ChatOllama({
model: modelType,
baseUrl: process.env.OLLAMA_HOST_PORT,
checkOrPullModel: true,
streaming: true,
});
Expand Down

0 comments on commit 0d2d77a

Please sign in to comment.