From ed7a1218cb014c635160f57c48e21d6031316a5d Mon Sep 17 00:00:00 2001 From: Mahmoud Mabrouk Date: Fri, 29 Dec 2023 12:45:15 +0100 Subject: [PATCH] Update default configuration functions by register_default --- docs/depractated/howto/use-a-custom-llm.mdx | 4 ++-- docs/developer_guides/sdk/config_datatypes.mdx | 8 ++++---- docs/developer_guides/sdk/config_default.mdx | 15 ++++++++++----- docs/developer_guides/sdk/quick_start.mdx | 2 +- .../tutorials/build-rag-application.mdx | 2 +- .../tutorials/deploy-mistral-model.mdx | 4 ++-- .../tutorials/first-app-with-langchain.mdx | 4 ++-- 7 files changed, 22 insertions(+), 17 deletions(-) diff --git a/docs/depractated/howto/use-a-custom-llm.mdx b/docs/depractated/howto/use-a-custom-llm.mdx index 668177e95c..6d8972b3c0 100644 --- a/docs/depractated/howto/use-a-custom-llm.mdx +++ b/docs/depractated/howto/use-a-custom-llm.mdx @@ -15,7 +15,7 @@ import requests default_prompt = "Please write a joke about {subject}" url = "https:///generate" -ag.config.default(prompt=default_prompt, +ag.config.register_default(prompt=default_prompt, temperature=0.8) @ag.entrypoint @@ -31,7 +31,7 @@ def generate(subject:str)->str: The above code is a simple LLM app that generates jokes about a given subject, using a vLLM hosted model. It is structured as follows: -`ag.config.default` sets the default values for the configuration of the LLM application. In this example, the default prompt is "Please write a joke about {subject}", and the temperature is set at 0.8. +`ag.config.register_default` sets the default values for the configuration of the LLM application. In this example, the default prompt is "Please write a joke about {subject}", and the temperature is set at 0.8. The `@ag.entrypoint` decorator marks the function that will be called. The function `generate` accepts a subject as input and returns a joke as output. It calls the vLLM hosted model using the requests library. 
diff --git a/docs/developer_guides/sdk/config_datatypes.mdx b/docs/developer_guides/sdk/config_datatypes.mdx index 98a261b93a..88f3577295 100644 --- a/docs/developer_guides/sdk/config_datatypes.mdx +++ b/docs/developer_guides/sdk/config_datatypes.mdx @@ -12,7 +12,7 @@ There are multiple data types that can be used for the parameters in the configu This display a text area widget in the playground. ```python -agenta.config.default(prompt_system = ag.TextParam("You are an expert in geography."), +agenta.config.register_default(prompt_system = ag.TextParam("You are an expert in geography."), prompt_user = ag.TextParam("What is the capital of {country}?")) ``` @@ -21,7 +21,7 @@ agenta.config.default(prompt_system = ag.TextParam("You are an expert in geograp This displays a slider widget in the playground with a step 1 (integers). ```python -agenta.config.default(intval1 = ag.IntParam(default=1, minval=0, maxval=2), +agenta.config.register_default(intval1 = ag.IntParam(default=1, minval=0, maxval=2), intval2 = ag.IntParam(1) ``` @@ -31,7 +31,7 @@ This displays a slider widget in the playground with a step 0.1 (float). ```python -agenta.config.default(temperature = ag.IntParam(default=0.5, minval=0, maxval=2), +agenta.config.register_default(temperature = ag.IntParam(default=0.5, minval=0, maxval=2), temperature2 = ag.IntParam(0.5) ``` @@ -41,7 +41,7 @@ This displays a binary switch in the playground. 
```python -agenta.config.default(temperature = ag.IntParam(default=0.5, minval=0, maxval=2), +agenta.config.register_default(temperature = ag.IntParam(default=0.5, minval=0, maxval=2), force_json = BinaryParam()) ``` diff --git a/docs/developer_guides/sdk/config_default.mdx b/docs/developer_guides/sdk/config_default.mdx index fdfea3b474..00d0823226 100644 --- a/docs/developer_guides/sdk/config_default.mdx +++ b/docs/developer_guides/sdk/config_default.mdx @@ -1,22 +1,26 @@ --- -title: "config.default()" +title: "config.register_default()" description: "Register the default configuration for your application" --- -`agenta.config.default(**kwargs)` +```python +agenta.config.register_default(**kwargs) +#alias +agenta.config.default(**kwargs) +``` Set the default configuration for your variant. For instance if you set ```python -agenta.config.default(prompt = ag.TextParam("Hello world")) +agenta.config.register_default(prompt = ag.TextParam("Hello world")) ``` This will set the default value of the prompt to "Hello World". This means that the default configuration (the configuration in the variant variant_name.default) will have the value "Hello World". -agenta.config.default should be used to set the parameters that you are planning to test and iterate on in the playground. For instance if your application has two different prompts in addition to temperature, you might want to use the following: +agenta.config.register_default should be used to set the parameters that you are planning to test and iterate on in the playground. 
For instance if your application has two different prompts in addition to temperature, you might want to use the following: ```python -agenta.config.default(prompt1 = ag.TextParam("my prompt1"), +agenta.config.register_default(prompt1 = ag.TextParam("my prompt1"), prompt2 = ag.TextParam("my prompt2"), temperature = ag.FloatParam(0.5), temperature2 = ag.FloatParam(0.5)) diff --git a/docs/developer_guides/sdk/quick_start.mdx b/docs/developer_guides/sdk/quick_start.mdx index 3907013df6..fbf4e6b399 100644 --- a/docs/developer_guides/sdk/quick_start.mdx +++ b/docs/developer_guides/sdk/quick_start.mdx @@ -8,7 +8,7 @@ The agenta SDK allows you to experiment with AI applications with LLMs (and in t The most commenly used functions are: - `agenta.init` - initialize your variant -- `agenta.config.default` - set the default configuration +- `agenta.config.register_default` - set the default configuration diff --git a/docs/developer_guides/tutorials/build-rag-application.mdx b/docs/developer_guides/tutorials/build-rag-application.mdx index 3061d12c5b..5297df5bc6 100644 --- a/docs/developer_guides/tutorials/build-rag-application.mdx +++ b/docs/developer_guides/tutorials/build-rag-application.mdx @@ -264,7 +264,7 @@ from llama_index.llms import OpenAI from llama_index.text_splitter import TokenTextSplitter ag.init() -ag.config.default( +ag.config.register_default( chunk_size=ag.IntParam(1024, 256, 4096), chunk_overlap=ag.IntParam(20, 0, 100), temperature=ag.IntParam(0.9, 0.0, 1.0), diff --git a/docs/developer_guides/tutorials/deploy-mistral-model.mdx b/docs/developer_guides/tutorials/deploy-mistral-model.mdx index 721de9b884..da7e6730fc 100644 --- a/docs/developer_guides/tutorials/deploy-mistral-model.mdx +++ b/docs/developer_guides/tutorials/deploy-mistral-model.mdx @@ -39,7 +39,7 @@ API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-v0.1 headers = {"Authorization": "Bearer [Your_Token]"} ag.init() -ag.config.default( +ag.config.register_default( 
prompt_template=ag.TextParam("Summarize the following text: {text}"), ) @@ -116,7 +116,7 @@ agenta variant serve app.py headers = {"Authorization": f"Bearer [Your_Token]"} ag.init() - ag.config.default( + ag.config.register_default( prompt_template=ag.TextParam("Summarize the following text: {text}") ) diff --git a/docs/developer_guides/tutorials/first-app-with-langchain.mdx b/docs/developer_guides/tutorials/first-app-with-langchain.mdx index 8100c77649..b6ae630570 100644 --- a/docs/developer_guides/tutorials/first-app-with-langchain.mdx +++ b/docs/developer_guides/tutorials/first-app-with-langchain.mdx @@ -92,7 +92,7 @@ default_prompt = """ startup idea: {startup_idea}""" ag.init() -ag.config.default(prompt_template=default_prompt, temperature=0.5) +ag.config.register_default(prompt_template=default_prompt, temperature=0.5) @ag.entrypoint def generate( @@ -113,7 +113,7 @@ Let's examine how we modified the original code. ```python ag.init() -ag.config.default(prompt_template=ag.TextParam(default_prompt), temperature=ag.FloatParam(0.5)) +ag.config.register_default(prompt_template=ag.TextParam(default_prompt), temperature=ag.FloatParam(0.5)) ``` These two lines initialize agenta, then set a default configuration for the app.