Skip to content

Commit

Permalink
feat: add demo searcher API and tool
Browse files Browse the repository at this point in the history
added demo searcher api and tools for demo purposes
  • Loading branch information
berkingurcan committed May 17, 2024
1 parent 087d638 commit e7401bb
Show file tree
Hide file tree
Showing 8 changed files with 202 additions and 5 deletions.
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ CODE_VECTOR_TYPE=XXXXX
PROJECT_VECTOR_TYPE=XXXXX
ISSUE_VECTOR_TYPE=XXXXX
SEARCH_VECTOR_TYPE=XXXXX
DEMO_SEARCH_VECTOR_TYPE=XXXXX

# Generate a random secret: https://generate-secret.vercel.app/32 or `openssl rand -base64 32`
AUTH_SECRET=XXXXXXXX
Expand Down
4 changes: 4 additions & 0 deletions app/api/demoSearcher/prompt.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// System prompt for the demo searcher endpoint (app/api/demoSearcher/route.ts).
// The text below is runtime behavior sent to the model — do not reword it
// casually; the instruction to list thread ids shapes the model's output.
export const SEARCHER_PROMPT = `
You are Discord Search Engine Use Only Demo Searcher Tool
List Related Threads with their thread id.
`
94 changes: 94 additions & 0 deletions app/api/demoSearcher/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
import { kv } from '@vercel/kv'
import { OpenAIStream, StreamingTextResponse } from 'ai'
import { Ratelimit } from '@upstash/ratelimit'
import OpenAI from 'openai'
import { NextResponse } from 'next/server'

import { SEARCHER_PROMPT } from './prompt'
import { validateApiKey } from '@/lib/utils'
import { demoSearchRunnable } from '@/lib/tools'

export const runtime = 'edge'

const discordToken = process.env.DISCORD_API_TOKEN as string

/**
 * Verifies the caller-supplied auth token against the configured Discord API token.
 *
 * @param token - token taken from the request body (`authToken`).
 * @throws Error('Unauthorized') when the token is missing, does not match, or
 *         when DISCORD_API_TOKEN is not configured on the server.
 *
 * Fix: the original built `[discordToken]` and checked `includes(token)`.
 * With DISCORD_API_TOKEN unset, `discordToken` is `undefined`, so a request
 * carrying an undefined `authToken` matched `[undefined]` and bypassed auth.
 * Rejecting an unset/empty server token or caller token closes that hole.
 */
async function authorization(token: string): Promise<void> {
  // Fail closed: no configured server token, or no caller token, means reject.
  if (!discordToken || !token || token !== discordToken) {
    throw new Error('Unauthorized');
  }
}

/**
 * POST /api/demoSearcher
 *
 * Body: { message, previewToken, authToken }
 *  - authToken:    checked by authorization(); failures return 401 JSON.
 *  - previewToken: caller-supplied OpenAI API key, validated by validateApiKey.
 *  - message:      free-form user query forwarded to the model.
 *
 * Streams a gpt-4-turbo response that may invoke the demo search tool
 * (demoSearchRunnable). Rate-limited per client IP via Upstash sliding window.
 */
export async function POST(req: Request) {
  const json = await req.json()
  const { message, previewToken, authToken } = json

  let configuration
  let model

  // Auth gate: any failure inside authorization() maps to a 401 JSON body.
  try {
    await authorization(authToken)
  } catch (error) {
    return new Response(JSON.stringify({ error: 'Unauthorized' }), {
      status: 401,
      headers: { 'Content-Type': 'application/json' },
    });
  }

  // Client IP used as the rate-limit bucket key. NOTE(review): x-forwarded-for
  // is spoofable unless a trusted proxy sets it — confirm deployment topology.
  const ip = req.headers.get('x-forwarded-for')

  if (validateApiKey(previewToken)) {
    // The caller's own OpenAI key is used for the completion request.
    configuration = {
      apiKey: previewToken
    }

    // Sliding-window limiter backed by Vercel KV: 1000 requests per 1 day.
    const ratelimit = new Ratelimit({
      redis: kv,
      limiter: Ratelimit.slidingWindow(1000, '1d')
    })

    const { success, limit, reset, remaining } = await ratelimit.limit(
      `ratelimit_${ip}`
    )

    if (!success) {
      // Standard X-RateLimit-* headers so clients can back off correctly.
      return new Response('You have reached your request limit for the day.', {
        status: 429,
        headers: {
          'X-RateLimit-Limit': limit.toString(),
          'X-RateLimit-Remaining': remaining.toString(),
          'X-RateLimit-Reset': reset.toString()
        }
      })
    }

    model = 'gpt-4-turbo'
    const openai = new OpenAI(configuration)

    // runTools lets the model call the demo search tool mid-completion; the
    // combined tool/assistant output is streamed back as it is produced.
    const runner = openai.beta.chat.completions.runTools({
      stream: true,
      model,
      temperature: 0.1, // low temperature: this is a search/lookup task
      messages: [
        {
          role: 'system',
          content: SEARCHER_PROMPT
        },
        {
          role: 'user',
          content: message
        }
      ],
      tools: demoSearchRunnable
    })

    const stream = OpenAIStream(runner)
    return new StreamingTextResponse(stream)
  } else {
    // NOTE(review): 500 for a missing/invalid caller key — 401/400 would be
    // more accurate; kept as-is to preserve observable behavior.
    return NextResponse.json(
      { error: 'OPENAI API KEY NOT FOUND' },
      { status: 500 }
    )
  }
}
2 changes: 1 addition & 1 deletion app/api/evalapi/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ export async function POST(req: Request) {
})
}

model = 'gpt-4-1106-preview'
model = 'gpt-4-turbo'
const openai = new OpenAI(configuration)

const runner = openai.beta.chat.completions.runTools({
Expand Down
2 changes: 1 addition & 1 deletion app/api/searcher/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ export async function POST(req: Request) {
})
}

model = 'gpt-4-1106-preview'
model = 'gpt-4-turbo'
const openai = new OpenAI(configuration)

const runner = openai.beta.chat.completions.runTools({
Expand Down
88 changes: 88 additions & 0 deletions lib/tools/demoSearch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
import type { Tool } from './tool'
import type { ChatCompletionCreateParams } from 'openai/resources/chat'
import { getEmbeddings, getMatchesFromEmbeddings } from './utils'
import { ScoredPineconeRecord } from '@pinecone-database/pinecone'
import { RunnableToolFunction } from 'openai/lib/RunnableFunction'

// Shape of the metadata stored alongside each vector in the index
// (see getMatchesFromEmbeddings in lib/tools/utils).
export type Metadata = {
  url: string
  text: string
  chunk: string
  hash: string
}

// Vector namespace used when querying (must be a valid VectorType in
// lib/tools/utils/pinecone.ts).
const VECTOR_TYPE = 'demoSearch'

// OpenAI function-calling schema for the demo thread-search tool.
const functionDescription: ChatCompletionCreateParams.Function = {
  name: 'demo_search_for_thread',
  description:
    'Search for context about discord threads',
  parameters: {
    type: 'object',
    properties: {
      query: {
        type: 'string',
        description:
          'The query to search for. 1-3 sentences are enough. English only.'
      }
    },
    required: ['query']
  }
}

// Status line surfaced while the tool runs. Fix: the original said
// "mina docs" — copy-pasted from the docs tool; this tool searches
// discord threads.
const functionMessage = 'Fetching context about discord threads...\n'

/**
 * Renders Pinecone matches as newline-joined markdown sections.
 * Matches scoring <= 0.25 are dropped; a missing/zero score defaults to 1
 * (kept). Section numbers follow the position in the original `matches`
 * array, so filtered-out entries leave gaps in the numbering.
 */
async function formatResults(matches: ScoredPineconeRecord[]) {
  const sections: string[] = []
  matches.forEach((match, index) => {
    const score = match.score || 1
    if (score <= 0.25) {
      return
    }
    const meta = match.metadata as Metadata
    // NOTE(review): heading and body are both `metadata.text`; the heading
    // was possibly meant to be a different field — confirm with the indexer.
    const heading = meta.text
    sections.push(`## Result ${index + 1}:\n${heading}\n${meta.text}`)
  })
  return sections.join('\n')
}

async function runTool(args: { query: string }): Promise<string> {
try {
const embeddings = await getEmbeddings(args.query)
const matches = await getMatchesFromEmbeddings(embeddings, 15, VECTOR_TYPE)

return formatResults(matches)
} catch (e) {
console.log('Error fetching docs: ', e)
return 'Error fetching docs'
}
}

// Tool-shaped wrapper around the demo search function, aggregated with the
// other tools in lib/tools/index.ts.
export const demoSearchTool: Tool = {
  name: functionDescription.name,
  // NOTE(review): `description` holds the whole function schema object, not
  // the description string — presumably how the Tool type is consumed
  // elsewhere; confirm against ./tool.
  description: functionDescription,
  message: functionMessage,
  callable: runTool
}

/**
 * Runnable tool definition consumed by `openai.beta.chat.completions.runTools`
 * (see app/api/demoSearcher/route.ts). Tool arguments are parsed with
 * JSON.parse and passed to runTool.
 *
 * Fix: the original inlined a copy of the JSON schema and dropped
 * `required: ['query']` (present in `functionDescription` above), allowing
 * the model to invoke the tool without a query argument.
 */
export const demoSearchToolRunnable: RunnableToolFunction<{ query: string }> = {
  type: 'function',
  function: {
    name: functionDescription.name,
    function: runTool,
    parse: JSON.parse,
    description:
      'Search for context about discord threads',
    parameters: {
      type: 'object',
      properties: {
        query: {
          type: 'string',
          description:
            'The query to search for. 1-3 sentences are enough. English only.'
        }
      },
      required: ['query']
    }
  }
}
9 changes: 9 additions & 0 deletions lib/tools/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import {
} from './checkDeprecated'
import { Tool } from './tool'
import { searchTool, searchToolRunnable } from './search'
import { demoSearchTool, demoSearchToolRunnable } from './demoSearch'

type ToolMap = {
[key: string]: Tool
Expand All @@ -29,6 +30,10 @@ export const tools: Tool[] = [
// Tool list for the searcher endpoint (app/api/searcher).
export const searcherTool: Tool[] = [
  searchTool
]

// Tool list for the demo searcher endpoint (app/api/demoSearcher).
export const demoSearcherTool: Tool[] = [
  demoSearchTool
]
export const toolMap = tools.reduce((acc: ToolMap, tool: Tool) => {
acc[tool.name] = tool
return acc
Expand All @@ -47,3 +52,7 @@ export const runnables: RunnableToolFunction<any>[] = [
// Runnable-tool list passed to `runTools` by the searcher endpoint.
export const searchRunnable: RunnableToolFunction<any>[] = [
  searchToolRunnable
]

// Runnable-tool list passed to `runTools` by the demo searcher endpoint.
export const demoSearchRunnable: RunnableToolFunction<any>[] = [
  demoSearchToolRunnable
]
7 changes: 4 additions & 3 deletions lib/tools/utils/pinecone.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ export type Metadata = {
hash: string
}

type VectorType = 'docs' | 'code' | 'project' | 'issue' | 'search'
// Known vector namespaces; each maps to a *_VECTOR_TYPE env var in getVectorType.
type VectorType = 'docs' | 'code' | 'project' | 'issue' | 'search' | 'demoSearch'

function getVectorType(vector_type: string): string | undefined {
if (!isVectorType(vector_type)) {
Expand All @@ -21,14 +21,15 @@ function getVectorType(vector_type: string): string | undefined {
code: process.env.CODE_VECTOR_TYPE,
project: process.env.PROJECT_VECTOR_TYPE,
issue: process.env.ISSUE_VECTOR_TYPE,
search: process.env.SEARCH_VECTOR_TYPE
search: process.env.SEARCH_VECTOR_TYPE,
demoSearch: process.env.DEMO_SEARCH_VECTOR_TYPE
}

return vectorTypeMap[vector_type]
}

function isVectorType(type: string): type is VectorType {
return ['docs', 'code', 'project', 'issue', 'search'].includes(type)
return ['docs', 'code', 'project', 'issue', 'search', 'demoSearch'].includes(type)
}

const getMatchesFromEmbeddings = async (
Expand Down

0 comments on commit e7401bb

Please sign in to comment.