Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat - Implemented BinaryParam SDK type #1052

Merged
merged 11 commits into from
Dec 19, 2023
1 change: 1 addition & 0 deletions agenta-cli/agenta/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
MessagesInput,
TextParam,
FileInputURL,
BinaryParam,
)
from .sdk.utils.preinit import PreInitObject
from .sdk.agenta_init import Config, init
Expand Down
1 change: 1 addition & 0 deletions agenta-cli/agenta/sdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
TextParam,
MessagesInput,
FileInputURL,
BinaryParam,
)
from .agenta_init import Config, init

Expand Down
5 changes: 5 additions & 0 deletions agenta-cli/agenta/sdk/agenta_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
TextParam,
MessagesInput,
FileInputURL,
BinaryParam,
)

app = FastAPI()
Expand Down Expand Up @@ -316,6 +317,7 @@ def override_schema(openapi_schema: dict, func_name: str, endpoint: str, params:
- The default value for DictInput instance
- The default value for MessagesParam instance
- The default value for FileInputURL instance
- The default value for BinaryParam instance
- ... [PLEASE ADD AT EACH CHANGE]

Args:
Expand Down Expand Up @@ -388,3 +390,6 @@ def find_in_schema(schema: dict, param_name: str, xparam: str):
):
subschema = find_in_schema(schema_to_override, param_name, "file_url")
subschema["default"] = "https://example.com"
if isinstance(param_val, BinaryParam):
subschema = find_in_schema(schema_to_override, param_name, "bool")
subschema["default"] = param_val.default
29 changes: 28 additions & 1 deletion agenta-cli/agenta/sdk/types.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import json
from typing import Any, Dict, List

from pydantic import BaseModel, Extra, HttpUrl
from pydantic import BaseModel, Extra, HttpUrl, Field


class InFile:
Expand Down Expand Up @@ -29,6 +29,33 @@ def __modify_schema__(cls, field_schema):
field_schema.update({"x-parameter": "text"})


class BoolMeta(type):
    """
    Metaclass that gives a class boolean ``default`` semantics without
    inheriting from ``bool`` (``bool`` cannot be subclassed in Python).

    Any ``default`` declared in the class body must be 0 or 1; it is
    normalized to a ``bool``. A missing ``default`` falls back to ``False``.

    Raises:
        ValueError: if the declared ``default`` is not 0 or 1.
    """

    def __new__(cls, name: str, bases: tuple, namespace: dict):
        # Validate before normalizing; True/False compare equal to 1/0, so
        # declaring `default = True` is accepted as well.
        if "default" in namespace and namespace["default"] not in [0, 1]:
            raise ValueError("Must provide either 0 or 1")
        # Normalize to bool. NOTE: the previous version re-assigned
        # `instance.default = 0` after creation, silently discarding a
        # declared `default = 1`; the normalized value is now preserved.
        namespace["default"] = bool(namespace.get("default", 0))
        return super().__new__(cls, name, bases, namespace)


class BinaryParam(int, metaclass=BoolMeta):
    """Boolean-like SDK parameter; rendered as a switch in the playground UI."""

    @classmethod
    def __modify_schema__(cls, field_schema):
        # Tag the generated OpenAPI schema so the frontend maps this
        # parameter to a boolean control (see determineType: "bool" -> "boolean").
        field_schema["x-parameter"] = "bool"
        field_schema["type"] = "boolean"


class IntParam(int):
def __new__(cls, default: int = 6, minval: float = 1, maxval: float = 10):
instance = super().__new__(cls, default)
Expand Down
27 changes: 20 additions & 7 deletions agenta-web/src/components/Playground/Views/ParametersCards.tsx
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import {Row, Card, Slider, Select, InputNumber, Col, Input, Button} from "antd"
import React from "react"
import {Parameter, InputParameter} from "@/lib/Types"
import {renameVariables} from "@/lib/helpers/utils"
import {createUseStyles} from "react-jss"
import {renameVariables} from "@/lib/helpers/utils"
import {Parameter, InputParameter} from "@/lib/Types"
import {Row, Card, Slider, Select, InputNumber, Col, Input, Button, Switch} from "antd"

const useStyles = createUseStyles({
row1: {
Expand Down Expand Up @@ -72,6 +72,10 @@ export const ModelParameters: React.FC<ModelParametersProps> = ({
handleParamChange,
}) => {
const classes = useStyles()
const handleCheckboxChange = (paramName: string, checked: boolean) => {
const value = checked ? 1 : 0
handleParamChange(paramName, value)
aybruhm marked this conversation as resolved.
Show resolved Hide resolved
}
return (
<>
{optParams?.some((param) => !param.input && param.type === "number") && (
Expand All @@ -80,10 +84,11 @@ export const ModelParameters: React.FC<ModelParametersProps> = ({
{optParams
?.filter(
(param) =>
!param.input &&
(param.type === "number" ||
param.type === "integer" ||
param.type === "array"),
(!param.input &&
(param.type === "number" ||
param.type === "integer" ||
param.type === "array")) ||
param.type === "boolean",
)
.map((param, index) => (
<Row key={index} className={classes.row2}>
Expand Down Expand Up @@ -136,6 +141,14 @@ export const ModelParameters: React.FC<ModelParametersProps> = ({
))}
</Select>
)}
{param.type === "boolean" && (
<Switch
defaultValue={param.default}
onChange={(checked: boolean) =>
aybruhm marked this conversation as resolved.
Show resolved Hide resolved
handleCheckboxChange(param.name, checked)
}
/>
)}
</Col>
<Col>
{param.type === "number" && (
Expand Down
2 changes: 2 additions & 0 deletions agenta-web/src/lib/helpers/openapi_parser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ const determineType = (xParam: any): string => {
return "number"
case "dict":
return "object"
case "bool":
return "boolean"
case "int":
return "integer"
case "file_url":
Expand Down
43 changes: 43 additions & 0 deletions examples/chat_json_format/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import agenta as ag
from agenta.sdk.types import BinaryParam
from openai import OpenAI

# OpenAI client; reads OPENAI_API_KEY from the environment by default.
client = OpenAI()

# System prompt steering the assistant toward JSON-formatted answers.
SYSTEM_PROMPT = "You have expertise in offering technical ideas to startups. Responses should be in json."
# Models that support the `response_format={"type": "json_object"}` option.
GPT_FORMAT_RESPONSE = ["gpt-3.5-turbo-1106", "gpt-4-1106-preview"]
# All selectable chat models (JSON-capable models included at the end).
CHAT_LLM_GPT = [
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-0301",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k-0613",
"gpt-4",
] + GPT_FORMAT_RESPONSE

# Register this app with agenta and declare the tunable configuration.
ag.init()
ag.config.default(
temperature=ag.FloatParam(0.2),
model=ag.MultipleChoiceParam("gpt-3.5-turbo", CHAT_LLM_GPT),
# -1 means "no explicit token limit" (translated to None in chat()).
max_tokens=ag.IntParam(-1, -1, 4000),
prompt_system=ag.TextParam(SYSTEM_PROMPT),
# Boolean toggle (defaults to off) for forcing JSON-object responses.
force_json_response=BinaryParam(),
)


@ag.entrypoint
def chat(inputs: ag.MessagesInput = ag.MessagesInput()):
    """Run a chat completion, optionally forcing a JSON-object response.

    Args:
        inputs: conversation messages appended after the system prompt.

    Returns:
        The assistant message content from the first completion choice.
    """
    conversation = [{"role": "system", "content": ag.config.prompt_system}] + inputs
    # A configured max_tokens of -1 means "no limit" for the API call.
    token_limit = None if ag.config.max_tokens == -1 else ag.config.max_tokens
    # JSON mode is only honored by models that support response_format.
    if ag.config.force_json_response and ag.config.model in GPT_FORMAT_RESPONSE:
        response_format = {"type": "json_object"}
    else:
        response_format = {"type": "text"}
    completion = client.chat.completions.create(
        model=ag.config.model,
        messages=conversation,
        temperature=ag.config.temperature,
        max_tokens=token_limit,
        response_format=response_format,
    )
    return completion.choices[0].message.content
2 changes: 2 additions & 0 deletions examples/chat_json_format/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
agenta
openai
Loading