diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ed73302..d1e7abf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,6 +8,10 @@ on: branches: - main +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: lint-test: runs-on: ubuntu-latest @@ -40,7 +44,7 @@ jobs: - name: Run chart-testing (lint) if: steps.list-changed.outputs.changed == 'true' - run: ct lint --debug --target-branch main + run: ct lint --debug --target-branch main --validate-yaml false - name: Create kind cluster if: steps.list-changed.outputs.changed == 'true' @@ -48,7 +52,7 @@ jobs: - name: Run chart-testing (install) if: steps.list-changed.outputs.changed == 'true' - run: ct install --debug --target-branch main + run: ct install --debug --target-branch main --chart-repos bitnami=https://charts.bitnami.com/bitnami release: needs: lint-test diff --git a/charts/langflow-ide/Chart.yaml b/charts/langflow-ide/Chart.yaml index 743662f..727e199 100644 --- a/charts/langflow-ide/Chart.yaml +++ b/charts/langflow-ide/Chart.yaml @@ -2,9 +2,11 @@ apiVersion: v2 name: langflow-ide description: Helm chart for Langflow IDE type: application -version: 0.0.1 -appVersion: 1.0-alpha - +version: 0.1.0 +appVersion: latest +maintainers: + - name: Langflow + email: contact@langflow.org dependencies: - name: postgresql version: 15.x diff --git a/charts/langflow-ide/values.yaml b/charts/langflow-ide/values.yaml index b60b68e..5de03f4 100644 --- a/charts/langflow-ide/values.yaml +++ b/charts/langflow-ide/values.yaml @@ -26,8 +26,8 @@ ingress: # hosts: # - chart-example.local -langflow: - global: +langflow: + global: image: pullPolicy: IfNotPresent tag: "" @@ -127,8 +127,6 @@ langflow: # kind: Managed # cachingmode: ReadOnly storageClass: {} - - frontend: enabled: true @@ -171,4 +169,4 @@ secretProvider: enabled: false postgresql: - enabled: false \ No newline at end of file + enabled: false diff --git a/charts/langflow-runtime/Chart.yaml b/charts/langflow-runtime/Chart.yaml index 0a5d50e..bdba7b8 100644 --- a/charts/langflow-runtime/Chart.yaml +++ b/charts/langflow-runtime/Chart.yaml @@ -2,5 +2,8 @@ apiVersion: v2 name: langflow-runtime description: A helm chart for running LangFlow flows as a service type: application -version: 0.0.1 -appVersion: "1.0-alpha" +version: 0.1.0 +appVersion: latest +maintainers: + - name: Langflow + email: contact@langflow.org diff --git a/charts/langflow-runtime/README.md b/charts/langflow-runtime/README.md index fca14a0..4567723 100644 --- a/charts/langflow-runtime/README.md +++ b/charts/langflow-runtime/README.md @@ -1,6 +1,6 @@ -# LangFlow runtime chart +# Langflow runtime chart -Deploy LangFlow flows to Kubernetes with this Helm chart. +Deploy Langflow flows to Kubernetes with this Helm chart. Using a dedicated deployment for a set of flows is fundamental in production environments in order to have granular resource control. @@ -13,10 +13,8 @@ There are two ways to import a flow: ```yaml downloadFlows: flows: - - url: https://raw.githubusercontent.com/langflow-ai/langflow/dev/tests/data/BasicChatwithPromptandHistory.json -# basicAuth: "myuser:mypassword" -# headers: -# Authorization: "Bearer my-key" + - url: https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/flows/basic-prompting-hello-world.json + endpoint: hello-world ``` 2. **Packaging the flow as docker image**: You can add a flow to a docker image based on the Langflow runtime and refer to it in the chart, as sketched below.
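For option 2, a minimal Dockerfile sketch (the base image tag, the flows directory, and the `LANGFLOW_LOAD_FLOWS_PATH` variable are assumptions to adapt to your setup; see `examples/langflow-runtime/docker` for the packaged example):

```dockerfile
# Sketch only: base image tag and flow path are assumptions, not chart defaults.
FROM langflowai/langflow-backend:latest

# Bake the exported flow JSON into the image.
RUN mkdir -p /app/flows
COPY basic-prompting-hello-world.json /app/flows/

# Point Langflow at the baked-in flows directory at startup (assumed variable).
ENV LANGFLOW_LOAD_FLOWS_PATH=/app/flows
```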
@@ -24,15 +22,40 @@ downloadFlows: ## Deploy the flow -Install the chart (using option 1): +Since the basic prompting flow needs an OpenAI key, we need to create a secret holding it: +``` +kubectl create secret generic langflow-secrets --from-literal=openai-key=sk-xxxx +``` +This command will create a secret named `langflow-secrets` with the key `openai-key` containing your secret value. + +We need to create a custom `values.yaml` file to: +1. Refer to the flow we want to deploy +2. Plug the secret we created into the Langflow deployment + +(`custom-values.yaml`) +```yaml +downloadFlows: + flows: + - url: https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/flows/basic-prompting-hello-world.json + endpoint: hello-world + +env: + - name: LANGFLOW_LOG_LEVEL + value: "INFO" + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: langflow-secrets + key: openai-key +``` +See the full file at [basic-prompting-hello-world.yaml](https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/langflow-runtime/basic-prompting-hello-world.yaml) + +Now we can deploy the chart (using option 1): ```bash helm repo add langflow https://langflow-ai.github.io/langflow-helm-charts helm repo update -helm install langflow-runtime langflow/langflow-runtime \ - --set "downloadFlows.flows[0].uuid=4ca07770-c0e4-487c-ad42-77c6039ce02e" \ - --set "downloadFlows.flows[0].url=https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/langflow-runtime/just-chat/justchat.json" \ - --set replicaCount=1 +helm install langflow-runtime langflow/langflow-runtime --values custom-values.yaml ``` Tunnel the service to localhost: @@ -41,22 +64,66 @@ kubectl port-forward svc/langflow-langflow-runtime 7860:7860 ``` -Call the flow API endpoint: +Call the flow API endpoint using `hello-world` as the flow name: ```bash curl -X POST \ - "http://localhost:7860/api/v1/run/4ca07770-c0e4-487c-ad42-77c6039ce02e?stream=false" \ + "http://localhost:7860/api/v1/run/hello-world?stream=false" \ -H 'Content-Type: application/json'\ -d '{ - "input_value": "message", + "input_value": "Hello there!", "output_type": "chat", - "input_type": "chat", - "tweaks": { - "ChatInput-1BPcY": {}, - "ChatOutput-J1bsS": {} - } + "input_type": "chat" }' ``` + +## Upgrade Langflow version +To change the Langflow version or use a custom docker image, you can modify the `image` parameter in the chart. + +```yaml +image: + repository: "langflowai/langflow-backend" + tag: 1.x.y +``` + +## Flow download options +The `downloadFlows` section in the `values.yaml` file allows you to download flows from remote locations. +You can specify the following options: +* `url`: The URL of the flow. Must point to a JSON file. +* `endpoint`: Override the endpoint of the flow. By default, the endpoint is the flow's UUID, or the value of the flow's `endpoint_name` key if set. +* `uuid`: Override the UUID of the flow. If not specified, the UUID will be extracted from the flow file. +* `basicAuth`: Basic authentication credentials in the form `username:password`. +* `headers`: Custom headers to add to the request, for example an Authorization header for downloading from private repositories. + +## Langflow secrets +The `env` section in the `values.yaml` file allows you to set environment variables for the Langflow deployment. +The recommended way to set sensitive information is to use Kubernetes secrets. +You can reference a secret in the `values.yaml` file by using the `valueFrom` key.
+ +```yaml +env: + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: langflow-secrets + key: openai-key + - name: ASTRA_DB_APPLICATION_TOKEN + valueFrom: + secretKeyRef: + name: langflow-secrets + key: astra-token +``` +where: +* `name`: refers to the environment variable name used by your flow. +* `valueFrom.secretKeyRef.name`: refers to the Kubernetes secret name. +* `valueFrom.secretKeyRef.key`: refers to the key in the secret. +For example, to create a secret matching the example above, you can use the following command: + +``` +kubectl create secret generic langflow-secrets --from-literal=openai-key=sk-xxxx --from-literal=astra-token=AstraCS:xxx +``` + + ## Scale the flows In order to add more resources to the flows container, you could decide to scale: @@ -68,12 +135,11 @@ In order to add more resources to the flows container, you could decide to scale: To scale horizontally, you only need to modify the `replicaCount` parameter in the chart. -```yaml -helm install langflow-runtime langflow/langflow-runtime \ - --set "downloadFlows.flows[0].uuid=4ca07770-c0e4-487c-ad42-77c6039ce02e" \ - --set "downloadFlows.flows[0].url=https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/langflow-runtime/just-chat/justchat.json" \ - --set replicaCount=5 ``` +replicaCount: 5 +``` + +Please note that if your flow relies on shared state (e.g. built-in chat memory), you will need to set up a shared database. ### Scale vertically @@ -89,13 +155,3 @@ resources: cpu: 100m memory: 128Mi ``` - - -To scale vertically you only need modify the `resources` - -``` -helm install langflow-runtime langflow/langflow-runtime \ - --set "downloadFlows.flows[0].uuid=4ca07770-c0e4-487c-ad42-77c6039ce02e" \ - --set "downloadFlows.flows[0].url=https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/langflow-runtime/just-chat/justchat.json" \ - --set replicaCount=5 -``` diff --git a/charts/langflow-runtime/templates/deployment.yaml b/charts/langflow-runtime/templates/deployment.yaml index 55f208c..45048fe 100644 --- a/charts/langflow-runtime/templates/deployment.yaml +++ b/charts/langflow-runtime/templates/deployment.yaml @@ -54,8 +54,9 @@ spec: set -e && mkdir -p {{ .Values.downloadFlows.path }} && {{- range .Values.downloadFlows.flows }} - echo "Downloading flows from {{ .url }}" && - curl --fail -o {{ $.Values.downloadFlows.path }}/{{ .uuid | default (.url | sha256sum | trunc 8) }}.json \ + {{- $targetFile := printf "%s/%s.json" $.Values.downloadFlows.path (.uuid | default (.url | sha256sum | trunc 8)) -}} + echo "Downloading flows from {{ .url }} to {{ $targetFile }}" && + curl --fail -o '{{ $targetFile }}' \ {{- if .basicAuth }} -u "{{ .basicAuth }}" \ {{- end }} @@ -64,7 +65,10 @@ spec: -H "{{ $key }}: {{ $value }}" \ {{- end }} {{- end }} - {{ .url }} && + '{{ .url }}' && + {{- if .endpoint }} + python -c 'import json, sys;f = sys.argv[1]; data = json.load(open(f));data["endpoint_name"]="{{ .endpoint }}";json.dump(data, open(f, "w"))' '{{ $targetFile }}' && + {{- end }} {{- end }} echo 'Flows downloaded' && langflow run --backend-only --host 0.0.0.0 --port {{ .Values.service.port }} diff --git a/charts/langflow-runtime/values.yaml b/charts/langflow-runtime/values.yaml index 38c859d..cbe9952 100644 --- a/charts/langflow-runtime/values.yaml +++ b/charts/langflow-runtime/values.yaml @@ -44,10 +44,10 @@ securityContext: {} env: - name: LANGFLOW_LOG_LEVEL value: "INFO" -# - name: openai_key_var +# - name: OPENAI_API_KEY # valueFrom: # secretKeyRef: -# name: openai-key +# name: 
langflow-secrets # key: openai-key @@ -108,4 +108,4 @@ nodeSelector: {} tolerations: [] -affinity: {} \ No newline at end of file +affinity: {} diff --git a/examples/flows/basic-prompting-hello-world.json b/examples/flows/basic-prompting-hello-world.json new file mode 100644 index 0000000..c253654 --- /dev/null +++ b/examples/flows/basic-prompting-hello-world.json @@ -0,0 +1,924 @@ +{ + "id": "26139860-a134-4437-992d-aea4fe09f820", + "data": { + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-HsC25", + "node": { + "template": { + "_type": "Component", + "files": { + "trace_as_metadata": true, + "file_path": "", + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "list": true, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "files", + "display_name": "Files", + "advanced": true, + "dynamic": false, + "info": "Files to be sent with the message.", + "title_case": false, + "type": "file" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + 
"input_value": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "input_value", + "display_name": "Text", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Message to be passed as input.", + "title_case": false, + "type": "str" + }, + "sender": { + "trace_as_metadata": true, + "options": [ + "Machine", + "User" + ], + "required": false, + "placeholder": "", + "show": true, + "value": "User", + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "Type of sender.", + "title_case": false, + "type": "str" + }, + "sender_name": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "User", + "name": "sender_name", + "display_name": "Sender Name", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Name of the sender.", + "title_case": false, + "type": "str" + }, + "session_id": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Session ID for the message.", + "title_case": false, + "type": "str" + }, + "should_store_message": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": true, + "name": "should_store_message", + "display_name": "Store Messages", + "advanced": true, + "dynamic": false, + "info": "Store the message in the history.", + "title_case": false, + "type": "bool" + } + }, + "description": "Get chat inputs from the Playground.", + "icon": "ChatInput", + "base_classes": [ + "Message" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "message", + "display_name": "Message", + "method": "message_response", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "beta": false, + "edited": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 309, + "id": "ChatInput-HsC25", + "position": { + "x": -493.6459512396177, + "y": 1083.200545525551 + }, + "positionAbsolute": { + "x": -493.6459512396177, + "y": 1083.200545525551 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-ECt88", + "node": { + "template": { + "_type": "Component", + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str 
= \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "trace_as_input": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ", + "name": "template", + "display_name": "Template", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "type": "prompt", + "load_from_db": false + }, + "user_input": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "user_input", + "display_name": "user_input", + "advanced": false, + "input_types": [ + "Message", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "base_classes": [ + "Message" + ], + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "user_input" + ] + }, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "prompt", + "display_name": "Prompt Message", + "method": "build_prompt", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "template" + ], + "beta": false, + "edited": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 423, + "id": "Prompt-ECt88", + "position": { + "x": 56.354011530798516, + "y": 1157.2005405164796 + }, + "positionAbsolute": { + "x": 56.354011530798516, + "y": 1157.2005405164796 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-d8TL1", + "node": { + "template": { + "_type": "Component", + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from 
langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "data_template": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "{text}", + "name": "data_template", + "display_name": "Data Template", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "title_case": false, + "type": "str" + }, + "input_value": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "input_value", + "display_name": "Text", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Message to be passed as output.", + "title_case": false, + "type": "str" + }, + "sender": { + "trace_as_metadata": true, + "options": [ + "Machine", + "User" + ], + "required": false, + "placeholder": "", + "show": true, + "value": "Machine", + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "Type of sender.", + "title_case": false, + "type": "str" + }, + "sender_name": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "AI", + "name": "sender_name", + "display_name": "Sender Name", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Name of the sender.", + "title_case": false, + "type": "str" + }, + "session_id": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Session ID for the message.", + "title_case": false, + "type": "str" + }, + "should_store_message": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": true, + "name": "should_store_message", + "display_name": "Store Messages", + "advanced": true, + "dynamic": false, + "info": "Store the message in the history.", + "title_case": false, + "type": "bool" + } + }, + "description": "Display a chat message in the Playground.", + "icon": "ChatOutput", + "base_classes": [ + "Message" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "message", + "display_name": "Message", + "method": "message_response", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template" + ], + "beta": false, + "edited": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 309, + "id": "ChatOutput-d8TL1", + "position": { + "x": 1219.477374823274, + "y": 1200.950216973985 + }, + "positionAbsolute": { + "x": 1219.477374823274, + "y": 1200.950216973985 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-ywskh", + "node": { + "template": { + "_type": "Component", + "api_key": { + "load_from_db": true, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "title_case": false, + "password": true, + "type": "str" + }, + "code": { + "type": 
"code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\", display_name=\"Model Name\", advanced=False, options=MODEL_NAMES, value=MODEL_NAMES[0]\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The 
message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "input_value", + "display_name": "Input", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "", + "title_case": false, + "type": "str" + }, + "json_mode": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": false, + "name": "json_mode", + "display_name": "JSON Mode", + "advanced": true, + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "title_case": false, + "type": "bool" + }, + "max_tokens": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "title_case": false, + "type": "int" + }, + "model_kwargs": { + "trace_as_input": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": {}, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "type": "dict" + }, + "model_name": { + "trace_as_metadata": true, + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "required": false, + "placeholder": "", + "show": true, + "value": "gpt-4o", + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "type": "str", + "load_from_db": false + }, + "openai_api_base": { + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "title_case": false, + "type": "str" + }, + "output_schema": { + "trace_as_input": true, + "list": true, + "required": false, + "placeholder": "", + "show": true, + "value": {}, + "name": "output_schema", + "display_name": "Schema", + "advanced": true, + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.", + "title_case": false, + "type": "dict" + }, + "seed": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": 1, + "name": "seed", + "display_name": "Seed", + "advanced": true, + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "title_case": false, + "type": "int" + }, + "stream": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "title_case": false, + "type": "bool" + }, + "system_message": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "System message to pass to the model.", + "title_case": false, + "type": "str" + }, + "temperature": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": 0.1, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "type": "float" + } + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "LanguageModel", + "Message" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "text_output", + "display_name": "Text", + "method": "text_response", + "value": "__UNDEFINED__", + "cache": true + }, + { + "types": [ + "LanguageModel" + ], + "selected": "LanguageModel", + "name": "model_output", + "display_name": "Language Model", + "method": "build_model", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed" + ], + "beta": false, + "edited": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 623, + "id": "OpenAIModel-ywskh", + "position": { + "x": 664.0296638933031, + "y": 1026.5966174731725 + }, + "positionAbsolute": { + "x": 664.0296638933031, + "y": 1026.5966174731725 + }, + "selected": false, + "type": "genericNode", + "width": 384 + } + ], + "edges": [ + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-HsC25", + "name": "message", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "user_input", + "id": "Prompt-ECt88", + "inputTypes": [ + "Message", + "Text" + ], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-HsC25{œdataTypeœ:œChatInputœ,œidœ:œChatInput-HsC25œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-ECt88{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-ECt88œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-HsC25", + "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-HsC25œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt-ECt88", + "targetHandle": 
"{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-ECt88œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-ECt88", + "name": "prompt", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-ywskh", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-ECt88{œdataTypeœ:œPromptœ,œidœ:œPrompt-ECt88œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ywskh{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ywskhœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-ECt88", + "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-ECt88œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", + "target": "OpenAIModel-ywskh", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ywskhœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-ywskh", + "name": "text_output", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-d8TL1", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-ywskh{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ywskhœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-d8TL1{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-d8TL1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-ywskh", + "sourceHandle": "{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ywskhœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", + "target": "ChatOutput-d8TL1", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-d8TL1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": 451.5472138901009, + "y": -612.9130871798467, + "zoom": 0.7627642232271605 + } + }, + "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. 
\nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ", + "name": "Basic Prompting (Hello, World)", + "last_tested_version": "1.0.12", + "endpoint_name": null, + "is_component": false +} \ No newline at end of file diff --git a/examples/flows/justchat.json b/examples/flows/justchat.json deleted file mode 100644 index 3a7bcf2..0000000 --- a/examples/flows/justchat.json +++ /dev/null @@ -1,362 +0,0 @@ -{ - "id": "4ca07770-c0e4-487c-ad42-77c6039ce02e", - "data": { - "nodes": [ - { - "id": "ChatInput-FKsPa", - "type": "genericNode", - "position": { "x": 393, "y": 150.625 }, - "data": { - "type": "ChatInput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build_no_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": [], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "value": "asasa" - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "options": ["Machine", "User"], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "User", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - 
"advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will be stored in the memory.", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "_type": "CustomComponent" - }, - "description": "Get chat inputs from the Playground.", - "icon": "ChatInput", - "base_classes": ["object", "Record", "str", "Text"], - "display_name": "Chat Input", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null - }, - "output_types": ["Text", "Record"], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatInput-FKsPa" - }, - "selected": false, - "width": 384, - "height": 375 - }, - { - "id": "ChatOutput-KPOpD", - "type": "genericNode", - "position": { "x": 1032, "y": 283.625 }, - "data": { - "type": "ChatOutput", - "node": { - "template": { - "code": { - "type": "code", - "required": true, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build_with_record(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template or \"\",\n )\n", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "code", - "advanced": true, - "dynamic": true, - "info": "", - "load_from_db": false, - "title_case": false - }, - "input_value": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "input_value", - "display_name": "Message", - "advanced": false, - "input_types": ["Text"], - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false - }, - "record_template": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": true, - "value": "{text}", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "record_template", - "display_name": "Record Template", - "advanced": true, - "dynamic": false, - "info": "In case of Message being a Record, this template will be used to convert it to text.", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "return_record": { - "type": "bool", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": 
"return_record", - "display_name": "Return Record", - "advanced": true, - "dynamic": false, - "info": "Return the message as a record containing the sender, sender_name, and session_id.", - "load_from_db": false, - "title_case": false - }, - "sender": { - "type": "str", - "required": false, - "placeholder": "", - "list": true, - "show": true, - "multiline": false, - "value": "Machine", - "fileTypes": [], - "file_path": "", - "password": false, - "options": ["Machine", "User"], - "name": "sender", - "display_name": "Sender Type", - "advanced": true, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "sender_name": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "value": "AI", - "fileTypes": [], - "file_path": "", - "password": false, - "name": "sender_name", - "display_name": "Sender Name", - "advanced": false, - "dynamic": false, - "info": "", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "session_id": { - "type": "str", - "required": false, - "placeholder": "", - "list": false, - "show": true, - "multiline": false, - "fileTypes": [], - "file_path": "", - "password": false, - "name": "session_id", - "display_name": "Session ID", - "advanced": true, - "dynamic": false, - "info": "If provided, the message will be stored in the memory.", - "load_from_db": false, - "title_case": false, - "input_types": ["Text"] - }, - "_type": "CustomComponent" - }, - "description": "Display a chat message in the Playground.", - "icon": "ChatOutput", - "base_classes": ["object", "Record", "str", "Text"], - "display_name": "Chat Output", - "documentation": "", - "custom_fields": { - "sender": null, - "sender_name": null, - "input_value": null, - "session_id": null, - "return_record": null, - "record_template": null - }, - "output_types": ["Text", "Record"], - "field_formatters": {}, - "frozen": false, - "field_order": [], - "beta": false - }, - "id": "ChatOutput-KPOpD" - }, - "selected": false, - "width": 384, - "height": 383 - } - ], - "edges": [ - { - "source": "ChatInput-FKsPa", - "sourceHandle": "{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-FKsPaœ}", - "target": "ChatOutput-KPOpD", - "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-KPOpDœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", - "data": { - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-KPOpD", - "inputTypes": ["Text"], - "type": "str" - }, - "sourceHandle": { - "baseClasses": ["object", "Record", "str", "Text"], - "dataType": "ChatInput", - "id": "ChatInput-FKsPa" - } - }, - "style": { "stroke": "#555" }, - "className": "stroke-foreground stroke-connection", - "id": "reactflow__edge-ChatInput-FKsPa{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-FKsPaœ}-ChatOutput-KPOpD{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-KPOpDœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" - } - ], - "viewport": { - "x": -107.7632978980871, - "y": -17.72772770977791, - "zoom": 1.2660822170323138 - } - }, - "description": "Craft Language Connections Here.", - "name": "justchat", - "last_tested_version": "0.0.3", - "is_component": false -} diff --git a/examples/langflow-runtime/basic-prompting-hello-world.yaml b/examples/langflow-runtime/basic-prompting-hello-world.yaml new file mode 100644 index 0000000..2eda608 --- /dev/null +++ b/examples/langflow-runtime/basic-prompting-hello-world.yaml @@ -0,0 +1,13 @@ 
+downloadFlows: + flows: + - url: https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/flows/basic-prompting-hello-world.json + endpoint: hello-world + +env: + - name: LANGFLOW_LOG_LEVEL + value: "INFO" + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: langflow-secrets + key: openai-key \ No newline at end of file diff --git a/examples/langflow-runtime/docker/README.md b/examples/langflow-runtime/docker/README.md index 8acdf89..3d10cfe 100644 --- a/examples/langflow-runtime/docker/README.md +++ b/examples/langflow-runtime/docker/README.md @@ -4,11 +4,11 @@ You can package the flow as a docker image and refer to it in the chart. ```bash # Download the flows -wget https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/flows/justchat.json +wget https://raw.githubusercontent.com/datastax/langflow-charts/main/examples/flows/basic-prompting-hello-world.json # Build the docker image locally -docker build -t myuser/langflow-just-chat:1.0.0 . +docker build -t myuser/langflow-hello-world:1.0.0 . # Push the image to DockerHub -docker push myuser/langflow-just-chat:1.0.0 +docker push myuser/langflow-hello-world:1.0.0 ``` Then use the runtime chart to deploy the application: @@ -17,6 +17,6 @@ helm repo add langflow https://langflow-ai.github.io/langflow-helm-charts helm repo update helm install langflow-runtime langflow/langflow-runtime \ - --set "image.repository=myuser/langflow-just-chat" \ - --set "image.tag=1.0.0" \ + --set "image.repository=myuser/langflow-hello-world" \ + --set "image.tag=1.0.0" ```
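Once installed, a quick way to verify the packaged flow (a sketch mirroring the main chart README; the service name follows your release name, and since this flow file sets no `endpoint_name`, the flow is served under its id from the JSON):

```bash
# Tunnel the service to localhost (adjust the service name to your release).
kubectl port-forward svc/langflow-langflow-runtime 7860:7860

# Call the flow; without an endpoint override, the flow id acts as the endpoint.
curl -X POST \
  "http://localhost:7860/api/v1/run/26139860-a134-4437-992d-aea4fe09f820?stream=false" \
  -H 'Content-Type: application/json' \
  -d '{"input_value": "Hello there!", "output_type": "chat", "input_type": "chat"}'
```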