From d3e61ddd891fe7803d9f1e7ac6bc5fc28c19a8e4 Mon Sep 17 00:00:00 2001 From: Xiaoyun Zhang Date: Tue, 17 Dec 2024 10:04:34 +0900 Subject: [PATCH] Release 0.0.12 (#125) * update * update * update --- eng/MetaInfo.props | 2 +- src/StepWise.Core/Step.cs | 2 +- src/StepWise.Core/StepWiseEngine.cs | 6 + .../components/chat-controlbar.tsx | 48 +++++--- stepwise-studio/components/chat-history.tsx | 4 +- .../components/claude-configure-card.tsx | 6 - stepwise-studio/components/control-bar.tsx | 70 ++++++------ .../components/llm-configuration.tsx | 7 ++ stepwise-studio/components/llm-selector.tsx | 104 +++++++++++++----- .../components/openai-configure-card.tsx | 90 ++++++++------- stepwise-studio/components/step-node.tsx | 2 +- stepwise-studio/components/workflow.tsx | 3 +- stepwise-studio/hooks/useStepRunHistory.tsx | 45 +++++++- stepwise-studio/hooks/useWorkflowEngine.tsx | 34 ++++-- stepwise-studio/pages/_app.tsx | 9 +- website/release_notes/0_0.md | 3 + 16 files changed, 290 insertions(+), 145 deletions(-) diff --git a/eng/MetaInfo.props b/eng/MetaInfo.props index 81ef244..adfcc36 100644 --- a/eng/MetaInfo.props +++ b/eng/MetaInfo.props @@ -1,7 +1,7 @@ - 0.0.11 + 0.0.12 LittleLittleCloud git false diff --git a/src/StepWise.Core/Step.cs b/src/StepWise.Core/Step.cs index 4eada06..fdb629f 100644 --- a/src/StepWise.Core/Step.cs +++ b/src/StepWise.Core/Step.cs @@ -407,7 +407,7 @@ public override string ToString() { if (this.Step is null && this.StepRunType == StepRunType.Variable) { - return $"{_result!.Name}[{_result!.Generation}]"; + return $"{_result!.Name}[{_result!.Generation}]:{_result!.Value}"; } // format [gen] stepName([gen]input1, [gen]input2, ...) diff --git a/src/StepWise.Core/StepWiseEngine.cs b/src/StepWise.Core/StepWiseEngine.cs index b47452c..44f45d2 100644 --- a/src/StepWise.Core/StepWiseEngine.cs +++ b/src/StepWise.Core/StepWiseEngine.cs @@ -267,6 +267,12 @@ private async Task ExecuteSingleStepAsync( _stepRunQueue.Add(variable); } + + // TODO + // maybe we should support the scenario when the res is null + // in this case, we should do the following: + // - clear the existing value in the context + // - add all the steps that depend on the value to the task queue again } catch (InvalidOperationException ioe) when (ioe.Message.Contains("The collection has been marked as complete with regards to additions")) { diff --git a/stepwise-studio/components/chat-controlbar.tsx b/stepwise-studio/components/chat-controlbar.tsx index f05a7b7..0381f3a 100644 --- a/stepwise-studio/components/chat-controlbar.tsx +++ b/stepwise-studio/components/chat-controlbar.tsx @@ -10,7 +10,7 @@ import { } from "./chat-history"; import { useStepwiseServerConfiguration } from "@/hooks/useVersion"; import { useAuth0 } from "@auth0/auth0-react"; -import { LLMSelector, useLLMSelectorStore } from "./llm-selector"; +import { LLMSelector, OpenAI_LLM, useLLMSelectorStore } from "./llm-selector"; import { useOpenAIConfiguration } from "./openai-configure-card"; import OpenAI from "openai"; import Image from "next/image"; @@ -41,13 +41,8 @@ export const ChatControlBar: React.FC = () => { const message = useChatBoxStore((state) => state.message); const chatHistory = useChatHistoryStore((state) => state.messages); const selectedLLM = useLLMSelectorStore((state) => state.selectedLLM); - const claudeLLMs = useClaudeConfiguration((state) => state.LLMTypes); - const openaiLLMs = useOpenAIConfiguration((state) => state.LLMTypes); - const openAIApiKey = useOpenAIConfiguration((state) => state.apiKey); - const claudeApiKey = 
useClaudeConfiguration((state) => state.apiKey); const setMessage = useChatBoxStore((state) => state.setMessage); const addMessage = useChatHistoryStore((state) => state.addMessage); - const deleteMessage = useChatHistoryStore((state) => state.deleteMessage); const configuration = useStepwiseServerConfiguration(); const [busy, setBusy] = React.useState(false); const { user } = useAuth0(); @@ -68,6 +63,11 @@ export const ChatControlBar: React.FC = () => { stepRunHistory: StepRunDTO[], chatHistory: ChatMessageContent[], ) => { + if (selectedLLM === undefined) { + toast.error("Please select a language model"); + return; + } + if (message !== "") { let userMessage: ChatMessage; if (configuration?.enableAuth0Authentication) { @@ -106,6 +106,8 @@ You are a helpful workflow assistant. Your name is ${llmName}. You are currently assisting user with the workflow ${workflow.name}. You can either invoke the workflow or provide assistance with the steps in the workflow. +When invoking a step in workflow, you don't need to consider whether it's pre-requisite steps are executed or not. The workflow engine will take care of it. So you can directly invoke the step. + Each workflow is associated with a context which contains the intermediate results of the workflow execution. ## current context: @@ -116,12 +118,17 @@ ${ .map((v) => `${v.result?.name}: ${v.result?.displayValue}`) .join("\n") } + +You don't need to provide the arguments if they are already available in the context. You can override the context variables by providing the arguments explicitly. `; const steps = workflow.steps; - if (openaiLLMs.find((f) => f === selectedLLM) && openAIApiKey) { + if ( + selectedLLM?.type === "OpenAI" && + (selectedLLM as OpenAI_LLM).apiKey + ) { const openAIClient = new OpenAI({ - apiKey: openAIApiKey, + apiKey: (selectedLLM as OpenAI_LLM).apiKey, dangerouslyAllowBrowser: true, }); @@ -147,7 +154,7 @@ ${ id: msg.id, function: { name: msg.name, - arguments: msg.arguments, + arguments: msg.argument, } as ChatCompletionMessageToolCall.Function, }, ] as ChatCompletionMessageToolCall[], @@ -175,7 +182,8 @@ ${ "Number", "Boolean", "String[]", - "Integer", + "Int32", + "Int64", "Float", "Double", ]; @@ -187,6 +195,8 @@ ${ Boolean: "boolean", "String[]": "array", Integer: "integer", + Int32: "integer", + Int64: "integer", Float: "number", Double: "number", }; @@ -198,6 +208,8 @@ ${ Boolean: undefined, "String[]": "string", Integer: undefined, + Int32: undefined, + Int64: undefined, Float: undefined, Double: undefined, }; @@ -242,7 +254,7 @@ ${ const chatCompletion = await openAIClient.chat.completions.create({ messages: [systemMessage, ...openAIChatHistory], - model: selectedLLM as ChatModel, + model: (selectedLLM as OpenAI_LLM).modelId, tool_choice: "auto", tools: tools, parallel_tool_calls: false, @@ -296,11 +308,13 @@ ${ }) .filter((v) => v !== undefined); + console.log(argumentsArray); + // merge the arguments with the context variables - // remove the context variables that are overriden by the arguments - const mergedVariables = argumentsArray.filter( + // and override the context variables with the arguments + const mergedVariables = contextVariables.filter( (v) => - !contextVariables.find( + !argumentsArray.find( (a) => a.result?.name === v.result?.name, ), ); @@ -309,15 +323,15 @@ ${ type: "tool", id: tool.id, name: toolName, - arguments: argumentJson, + argument: argumentJson, displayValue: "", values: [], isExecuting: true, }; addMessage(toolMessage); const newStepRunHistory = await executeStep(step, [ - 
...contextVariables, ...mergedVariables, + ...argumentsArray, ]); if (newStepRunHistory.length > 0) { @@ -452,7 +466,7 @@ ${ chatHistory, ) } - disabled={busy || message === ""} + disabled={busy || message === "" || selectedLLM === undefined} tooltip="Send message (Ctrl + Enter)" > diff --git a/stepwise-studio/components/chat-history.tsx b/stepwise-studio/components/chat-history.tsx index dc7f7fd..4e15bd8 100644 --- a/stepwise-studio/components/chat-history.tsx +++ b/stepwise-studio/components/chat-history.tsx @@ -33,7 +33,7 @@ export interface ChatTool { type: "tool"; id?: string; name: string; - arguments: string; + argument: string; displayValue: string; values?: VariableDTO[]; isExecuting: boolean; // whether the tool is currently executing @@ -138,6 +138,7 @@ export const ChatToolCard: React.FC = ({ displayValue, values, isExecuting, + argument, }) => { const [collapsed, setCollapsed] = React.useState(true); const [executing, setExecuting] = React.useState(isExecuting); @@ -166,6 +167,7 @@ export const ChatToolCard: React.FC = ({ {!collapsed && values && (
+ {argument} {values.map((value, index) => ( ))} diff --git a/stepwise-studio/components/claude-configure-card.tsx b/stepwise-studio/components/claude-configure-card.tsx index 74cc0b0..2a1c4a3 100644 --- a/stepwise-studio/components/claude-configure-card.tsx +++ b/stepwise-studio/components/claude-configure-card.tsx @@ -28,9 +28,6 @@ export const useClaudeConfiguration = create( (set, get) => ({ apiKey: undefined, setApiKey: (apiKey: string) => { - get().LLMTypes.forEach((llm) => { - useLLMSelectorStore.getState().addLLM(llm); - }); set({ apiKey }); }, readApiKeyFromStorage: () => { @@ -48,9 +45,6 @@ export const useClaudeConfiguration = create( } }, clearApiKey: () => { - get().LLMTypes.forEach((llm) => { - useLLMSelectorStore.getState().deleteLLM(llm); - }); set({ apiKey: undefined }); }, LLMTypes: [ diff --git a/stepwise-studio/components/control-bar.tsx b/stepwise-studio/components/control-bar.tsx index c7a548b..7433736 100644 --- a/stepwise-studio/components/control-bar.tsx +++ b/stepwise-studio/components/control-bar.tsx @@ -5,7 +5,7 @@ // | | | | : | : | import { ChangeEvent, FC, useEffect, useState } from "react"; -import { buttonVariants } from "./ui/button"; +import { Button, buttonVariants } from "./ui/button"; import { cn } from "@/lib/utils"; import { GithubIcon, @@ -50,13 +50,10 @@ export const ControlBar: FC = (props) => {
{isMobile && } - - - - +
{/* vertical divider */}
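// A minimal sketch (assumptions noted inline) of the .NET-parameter-type to
// JSON-schema mapping that chat-controlbar.tsx above uses when it exposes
// workflow steps as OpenAI tools. StepDTO/ParameterDTO shapes are simplified
// stand-ins, not the real StepWise DTOs.
type ParameterDTO = { name: string; parameter_type: string };
type StepDTO = {
	name: string;
	description?: string;
	parameters?: ParameterDTO[];
};

const jsonSchemaType: Record<string, string> = {
	String: "string",
	Number: "number",
	Boolean: "boolean",
	"String[]": "array",
	Int32: "integer",
	Int64: "integer",
	Float: "number",
	Double: "number",
};

// Array parameters also need an `items` schema; only String[] is covered here.
const arrayItemType: Record<string, string> = { "String[]": "string" };

// Build an object in the shape accepted by the `tools` option of
// chat.completions.create. Parameters with unmapped types are skipped so the
// model is never asked to fill in values it cannot express as JSON.
function stepToTool(step: StepDTO) {
	const properties: Record<string, object> = {};
	for (const p of step.parameters ?? []) {
		const type = jsonSchemaType[p.parameter_type];
		if (!type) continue;
		properties[p.name] =
			type === "array"
				? { type, items: { type: arrayItemType[p.parameter_type] } }
				: { type };
	}
	return {
		type: "function" as const,
		function: {
			name: step.name,
			description: step.description ?? "",
			parameters: { type: "object", properties },
		},
	};
}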
diff --git a/stepwise-studio/components/llm-configuration.tsx b/stepwise-studio/components/llm-configuration.tsx index ca14ec3..eef6878 100644 --- a/stepwise-studio/components/llm-configuration.tsx +++ b/stepwise-studio/components/llm-configuration.tsx @@ -1,10 +1,17 @@ import { ClaudeConfigCard } from "./claude-configure-card"; +import { useLLMSelectorStore } from "./llm-selector"; import { OpenAIConfigCard } from "./openai-configure-card"; export const LLMConfiguration: React.FC = () => { + const availableLLMs = useLLMSelectorStore((state) => state.availableLLMs); return (
+			{availableLLMs.map((llm) => {
+				if (llm.type === "AOAI") {
+					return ;
+				}
+			})}
); }; diff --git a/stepwise-studio/components/llm-selector.tsx b/stepwise-studio/components/llm-selector.tsx index c356135..45ef39a 100644 --- a/stepwise-studio/components/llm-selector.tsx +++ b/stepwise-studio/components/llm-selector.tsx @@ -7,42 +7,92 @@ import { SelectTrigger, SelectValue, } from "./ui/select"; -import { useOpenAIConfiguration } from "./openai-configure-card"; -import { useClaudeConfiguration } from "./claude-configure-card"; import { Model } from "@anthropic-ai/sdk/resources/messages.mjs"; import { ChatModel } from "openai/resources/index.mjs"; -export type LLMType = ChatModel | Model; +import { toast } from "sonner"; +export type LLM = { + name: string; // should be identical. + type: LLMType; +}; + +export interface AOAI_LLM extends LLM { + type: "AOAI"; + apiKey: string; + endPoint: string; + deployName: string; +} + +export interface OpenAI_LLM extends LLM { + type: "OpenAI"; + apiKey?: string; + modelId: ChatModel; +} + +export type LLMType = ChatModel | Model | "AOAI" | "OpenAI"; export interface LLMState { - availableLLMs: Set; - selectedLLM?: LLMType; - selectLLM: (llm: LLMType) => void; - addLLM: (llm: LLMType) => void; - deleteLLM: (llm: LLMType) => void; + availableLLMs: LLM[]; + selectedLLM?: LLM; + selectLLM: (llm: LLM) => void; + addOrUpdateLLM: (llm: LLM) => void; + deleteLLM: (llm: LLM) => void; clearSelectedLLM: () => void; + saveAvailableLLMsToStorage: () => void; + loadAvailableLLMsFromStorage: () => void; } -export const useLLMSelectorStore = create((set) => ({ - availableLLMs: new Set([]), +export const useLLMSelectorStore = create((set, get) => ({ + availableLLMs: [], selectedLLM: undefined, selectLLM: (llm) => set(() => ({ selectedLLM: llm })), - addLLM: (llm) => + addOrUpdateLLM: (llmToAdd) => set((state) => { - state.availableLLMs.add(llm); - return { availableLLMs: state.availableLLMs }; + const llms = state.availableLLMs.filter( + (llm) => + llm.name !== llmToAdd.name || llm.type !== llmToAdd.type, + ); + + toast.success("LLM added!"); + + if ( + state.selectedLLM?.name === llmToAdd.name && + state.selectedLLM?.type === llmToAdd.type + ) { + set({ selectedLLM: llmToAdd }); + } + + return { availableLLMs: [...llms, llmToAdd] }; }), - loadLLMs: () => {}, clearSelectedLLM: () => set(() => ({ selectedLLM: undefined })), - deleteLLM: (llm) => + deleteLLM: (llmToRemove) => set((state) => { - state.availableLLMs.delete(llm); - return { availableLLMs: state.availableLLMs }; + const index = state.availableLLMs.findIndex( + (llm) => + llmToRemove.type === llm.type && + llmToRemove.name === llm.name, + ); + if (index !== -1) { + state.availableLLMs.splice(index, 1); + } + + return { availableLLMs: [...state.availableLLMs] }; }), + saveAvailableLLMsToStorage: () => { + const llms = JSON.stringify( + useLLMSelectorStore.getState().availableLLMs, + ); + localStorage.setItem("stepwise-llms", llms); + }, + loadAvailableLLMsFromStorage: () => { + const llms = localStorage.getItem("stepwise-llms"); + if (llms) { + set({ availableLLMs: JSON.parse(llms) }); + } + }, })); export const LLMSelector: React.FC = () => { const availableLLMs = useLLMSelectorStore((state) => state.availableLLMs); - const openaiApi = useOpenAIConfiguration((state) => state.apiKey); const selectLLM = useLLMSelectorStore((state) => state.selectLLM); const selectedLLM = useLLMSelectorStore((state) => state.selectedLLM); const clearSelectedLLM = useLLMSelectorStore( @@ -50,22 +100,26 @@ export const LLMSelector: React.FC = () => { ); useEffect(() => { - if (selectedLLM === undefined && 
availableLLMs.size > 0) {
-			selectLLM(availableLLMs.values().next().value as LLMType);
+		if (selectedLLM === undefined && availableLLMs.length > 0) {
+			selectLLM(availableLLMs[0]);
+		}
+
+		if (availableLLMs.length === 0) {
+			clearSelectedLLM();
+		}
 	}, [availableLLMs]);

-	return availableLLMs.size === 0 ? (
+	return availableLLMs.length === 0 || selectedLLM === undefined ? (

No LLMs available

) : (
diff --git a/stepwise-studio/components/openai-configure-card.tsx b/stepwise-studio/components/openai-configure-card.tsx index 0c2e68e..0116c7b 100644 --- a/stepwise-studio/components/openai-configure-card.tsx +++ b/stepwise-studio/components/openai-configure-card.tsx @@ -12,45 +12,37 @@ import { import { Input } from "./ui/input"; import { create } from "zustand"; import { toast } from "sonner"; -import { LLMType, useLLMSelectorStore } from "./llm-selector"; +import { LLM, LLMType, OpenAI_LLM, useLLMSelectorStore } from "./llm-selector"; export interface OpenAIConfigurationState { apiKey?: string; setApiKey: (apiKey: string) => void; - readApiKeyFromStorage: () => void; - saveApiKeyToStorage: () => void; - removeApiKeyFromStorage: () => void; - clearApiKey: () => void; - LLMTypes: LLMType[]; + LLMs: OpenAI_LLM[]; } export const useOpenAIConfiguration = create( (set, get) => ({ apiKey: undefined, setApiKey: (apiKey: string) => { - get().LLMTypes.forEach((llm) => { - useLLMSelectorStore.getState().addLLM(llm); - }); set({ apiKey }); }, - readApiKeyFromStorage: () => { - const apiKey = localStorage.getItem("stepwise-openai-api-key"); - if (apiKey) { - get().setApiKey(apiKey); - } - }, - saveApiKeyToStorage: () => { - if (get().apiKey) { - localStorage.setItem("stepwise-openai-api-key", get().apiKey!); - } - }, - clearApiKey: () => { - get().LLMTypes.forEach((llm) => { - useLLMSelectorStore.getState().deleteLLM(llm); - }); - set({ apiKey: undefined }); - }, - LLMTypes: ["gpt-4o", "gpt-3.5-turbo", "gpt-4"], + LLMs: [ + { + modelId: "gpt-4o", + name: "gpt-4o", + type: "OpenAI", + }, + { + modelId: "gpt-4", + name: "gpt-4", + type: "OpenAI", + }, + { + modelId: "gpt-3.5-turbo", + name: "gpt-3.5-turbo", + type: "OpenAI", + }, + ], removeApiKeyFromStorage: () => { localStorage.removeItem("stepwise-openai-api-key"); }, @@ -58,27 +50,47 @@ export const useOpenAIConfiguration = create( ); export const OpenAIConfigCard: React.FC = () => { + const { apiKey, setApiKey, LLMs } = useOpenAIConfiguration(); + const { - apiKey, - setApiKey, - saveApiKeyToStorage, - clearApiKey, - removeApiKeyFromStorage, - } = useOpenAIConfiguration(); + addOrUpdateLLM, + deleteLLM, + availableLLMs, + saveAvailableLLMsToStorage, + loadAvailableLLMsFromStorage, + } = useLLMSelectorStore(); const [showKey, setShowKey] = useState(false); + useEffect(() => { + const firstOpenAI = availableLLMs.find((llm) => llm.type === "OpenAI"); + if (firstOpenAI && (firstOpenAI as OpenAI_LLM).apiKey) { + setApiKey((firstOpenAI as OpenAI_LLM).apiKey!); + } + }, [availableLLMs]); + const handleSave = async () => { if (!apiKey) { - // clear the API key - clearApiKey(); - removeApiKeyFromStorage(); + const llmsToRemove = availableLLMs.filter( + (llm) => llm.type === "OpenAI", + ); + llmsToRemove.forEach((llm) => { + deleteLLM(llm); + }); + + saveAvailableLLMsToStorage(); toast.info("OpenAI API key cleared"); return; } else { - // Save the API key to local storage - saveApiKeyToStorage(); - setApiKey(apiKey); + const llmsToAdd = LLMs.map((llm) => { + return { ...llm, apiKey: apiKey } as OpenAI_LLM; + }); + + llmsToAdd.forEach((llm) => { + addOrUpdateLLM(llm); + }); + + saveAvailableLLMsToStorage(); toast.success("OpenAI API key saved successfully"); } }; diff --git a/stepwise-studio/components/step-node.tsx b/stepwise-studio/components/step-node.tsx index 46d7157..3764c9a 100644 --- a/stepwise-studio/components/step-node.tsx +++ b/stepwise-studio/components/step-node.tsx @@ -414,7 +414,7 @@ const StepNode: React.FC> = (prop) => { {/* appear when 
hover */}
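// A small usage sketch of the reworked LLM selector store, assuming the
// zustand store, OpenAI_LLM type, and storage helpers added in
// llm-selector.tsx above. It roughly mirrors what OpenAIConfigCard.handleSave
// does once the user enters an API key.
import { useLLMSelectorStore, type OpenAI_LLM } from "./llm-selector";

const gpt4o: OpenAI_LLM = {
	name: "gpt-4o",
	type: "OpenAI",
	modelId: "gpt-4o",
	apiKey: "sk-...", // placeholder; supplied by the user in the config card
};

// Replace any existing entry with the same name/type, then persist the list.
useLLMSelectorStore.getState().addOrUpdateLLM(gpt4o);
useLLMSelectorStore.getState().saveAvailableLLMsToStorage();

// On startup (e.g. in _app.tsx) the saved list can be restored, after which
// LLMSelector auto-selects the first available entry.
useLLMSelectorStore.getState().loadAvailableLLMsFromStorage();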