diff --git a/examples/job_info_extractor/README.md b/examples/job_info_extractor/README.md index 757455e2ca..4b0b51cc61 100644 --- a/examples/job_info_extractor/README.md +++ b/examples/job_info_extractor/README.md @@ -1,9 +1,49 @@ -# Using this template +# Extraction using OpenAI Functions and LangChain -Please make sure to create a `.env` file with your OpenAI API key before running the app. -OPENAI_API_KEY=sk-xxxxxxx -You can find your keys here: -https://platform.openai.com/account/api-keys +This template is designed to extract job information (company name, job +title, salary range) from a job description. It uses OpenAI Functions and +LangChain. It runs with agenta. +[Agenta](https://github.com/agenta-ai/agenta) is an open-source LLMOps +platform that allows you to 1) quickly experiment and compare +configurations for LLM apps 2) evaluate prompts and workflows 3) deploy +applications easily. + +## How to use +### 0. Prerequisites +- Install the agenta CLI +```bash +pip install agenta-cli +``` +- Either create an account in [agenta cloud](https://cloud.agenta.ai/) or +[self-host agenta](/self-host/host-locally) + +### 1. Clone the repository + +```bash +git clone https://github.com/Agenta-AI/job_extractor_template +``` + +### 2. Initialize the project + +```bash +agenta init +``` + +### 3. Set up your OpenAI API key +Create a .env file by copying the .env.example file and add your OpenAI +API key to it. +```bash +OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxx +``` + +### 4. Deploy the application to agenta + +```bash +agenta variant serve app.py +``` + +### 5. 
Experiment with the prompts in a playground and evaluate different variants in agenta + +https://github.com/Agenta-AI/job_extractor_template/assets/4510758/30271188-8d46-4d02-8207-ddb60ad0e284 -Go back to the [Getting started tutorial](https://docs.agenta.ai/getting-started) to continue \ No newline at end of file diff --git a/examples/job_info_extractor/app.py b/examples/job_info_extractor/app.py index 4ccd38a583..8d4dd21941 100644 --- a/examples/job_info_extractor/app.py +++ b/examples/job_info_extractor/app.py @@ -11,11 +11,17 @@ from pydantic import BaseModel, Field -default_prompt = "What is a good name for a company that makes {product}?" +CHAT_LLM_GPT = [ + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-0301", + "gpt-3.5-turbo", + "gpt-4", +] ag.init() ag.config.default( - prompt_template=ag.TextParam(default_prompt), system_message=ag.TextParam( "You are a world class algorithm for extracting information in structured formats." ), @@ -26,10 +32,11 @@ company_desc_message=ag.TextParam("The name of the company"), position_desc_message=ag.TextParam("The name of the position"), salary_range_desc_message=ag.TextParam("The salary range of the position"), - temperature=ag.FloatParam(0.5), - top_p=ag.FloatParam(1.0), + temperature=ag.FloatParam(0.9), + top_p=ag.FloatParam(0.9), presence_penalty=ag.FloatParam(0.0), frequency_penalty=ag.FloatParam(0.0), + model=ag.MultipleChoiceParam("gpt-3.5-turbo-0613", CHAT_LLM_GPT), ) @@ -50,7 +57,7 @@ def generate( ) -> str: """Extract information from a job description""" llm = ChatOpenAI( - model="gpt-3.5-turbo-0613", + model=ag.config.model, temperature=ag.config.temperature, top_p=ag.config.top_p, presence_penalty=ag.config.presence_penalty,