diff --git a/docs/assets/title-and-description-markdown-cluster-workflow-template.png b/docs/assets/title-and-description-markdown-cluster-workflow-template.png new file mode 100644 index 000000000000..7f8b31aba6df Binary files /dev/null and b/docs/assets/title-and-description-markdown-cluster-workflow-template.png differ diff --git a/docs/assets/workflow-title-and-description-markdown-complex.png b/docs/assets/title-and-description-markdown-complex-workflow.png similarity index 100% rename from docs/assets/workflow-title-and-description-markdown-complex.png rename to docs/assets/title-and-description-markdown-complex-workflow.png diff --git a/docs/assets/title-and-description-markdown-cron-workflow.png b/docs/assets/title-and-description-markdown-cron-workflow.png new file mode 100644 index 000000000000..99ccd921fcb1 Binary files /dev/null and b/docs/assets/title-and-description-markdown-cron-workflow.png differ diff --git a/docs/assets/title-and-description-markdown-workflow-template.png b/docs/assets/title-and-description-markdown-workflow-template.png new file mode 100644 index 000000000000..f8564afaac5d Binary files /dev/null and b/docs/assets/title-and-description-markdown-workflow-template.png differ diff --git a/docs/assets/workflow-title-and-description-markdown.png b/docs/assets/title-and-description-markdown-workflow.png similarity index 100% rename from docs/assets/workflow-title-and-description-markdown.png rename to docs/assets/title-and-description-markdown-workflow.png diff --git a/docs/assets/workflow-title-and-description.png b/docs/assets/title-and-description-workflow.png similarity index 100% rename from docs/assets/workflow-title-and-description.png rename to docs/assets/title-and-description-workflow.png diff --git a/docs/title-and-description.md b/docs/title-and-description.md index 9d49082e6a0b..3e236be266af 100644 --- a/docs/title-and-description.md +++ b/docs/title-and-description.md @@ -15,7 +15,7 @@ metadata: ``` The above manifest will 
render as a row like the below image: -![Title and Description Example](assets/workflow-title-and-description.png) +![Title and Description Example](assets/title-and-description-workflow.png) ## Embedded Markdown @@ -34,7 +34,7 @@ metadata: ``` The above manifest will render as a row like the below image: -![Markdown Example](assets/workflow-title-and-description-markdown.png) +![Markdown Example](assets/title-and-description-markdown-workflow.png) Below are a few more examples: @@ -76,4 +76,67 @@ metadata: ``` The above examples will render as rows like the below image: -![More Markdown Examples](assets/workflow-title-and-description-markdown-complex.png) +![More Markdown Examples](assets/title-and-description-markdown-complex-workflow.png) + +### For `ClusterWorkflowTemplates` + +> v3.6 and after + +You can also add the `workflows.argoproj.io/title` and `workflows.argoproj.io/description` annotations with embedded markdown to a `ClusterWorkflowTemplate` to display in the list: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: ClusterWorkflowTemplate +metadata: + name: my-cluster-workflow-template + annotations: + workflows.argoproj.io/title: '**Test Title**' + workflows.argoproj.io/description: | + `This is a simple hello world example.` + You can also run it in Python: https://couler-proj.github.io/couler/examples/#hello-world +``` + +The above manifest will render as a row like the below image: +![ClusterWorkflowTemplate Example](assets/title-and-description-markdown-cluster-workflow-template.png) + +### For `CronWorkflows` + +> v3.6 and after + +You can also add the `workflows.argoproj.io/title` and `workflows.argoproj.io/description` annotations with embedded markdown to a `CronWorkflow` to display in the list: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: CronWorkflow +metadata: + name: my-cron-workflow + annotations: + workflows.argoproj.io/title: '**Test Title**' + workflows.argoproj.io/description: | + `This is a simple hello world example.` + You 
can also run it in Python: https://couler-proj.github.io/couler/examples/#hello-world +``` + +The above manifest will render as a row like the below image: +![CronWorkflow Example](assets/title-and-description-markdown-cron-workflow.png) + +### For `WorkflowTemplates` + +> v3.6 and after + +You can also add the `workflows.argoproj.io/title` and `workflows.argoproj.io/description` annotations with embedded markdown to a `WorkflowTemplate` to display in the list: + +```yaml +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: my-workflow-template + annotations: + workflows.argoproj.io/title: '**Test Title**' + workflows.argoproj.io/description: | + `This is a simple hello world example.` + You can also run it in Python: https://couler-proj.github.io/couler/examples/#hello-world +``` + +The above manifest will render as a row like the below image: +![WorkflowTemplate Example](assets/title-and-description-markdown-workflow-template.png) diff --git a/examples/README.md b/examples/README.md index e9491eb7cd58..657b00d66a94 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,3 +1,3 @@ # Documentation by Example -This has been moved to [the docs](https://argo-workflows.readthedocs.io/en/latest/walk-through/). +This directory contains various examples and is referenced by [the docs site](https://argo-workflows.readthedocs.io). 
diff --git a/go.mod b/go.mod index 1e299252d5c7..30719b0457ff 100644 --- a/go.mod +++ b/go.mod @@ -212,7 +212,7 @@ require ( github.com/go-logr/logr v1.4.1 // indirect github.com/go-openapi/jsonpointer v0.20.2 // indirect github.com/go-openapi/swag v0.22.6 // indirect - github.com/golang-jwt/jwt/v4 v4.5.0 // indirect + github.com/golang-jwt/jwt/v4 v4.5.1 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/mock v1.6.0 github.com/google/btree v1.0.1 // indirect diff --git a/go.sum b/go.sum index d9d8befc0a2d..25d52e0ee925 100644 --- a/go.sum +++ b/go.sum @@ -331,8 +331,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= +github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= diff --git a/mkdocs.yml b/mkdocs.yml index 2f69fbfb1fa5..25f2564ea380 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,4 +1,5 @@ site_name: Argo Workflows - The workflow engine for Kubernetes +site_url: !ENV [READTHEDOCS_CANONICAL_URL, https://argo-workflows.readthedocs.io/en/latest/] # 
https://docs.readthedocs.io/en/stable/canonical-urls.html#how-to-specify-the-canonical-url, https://www.mkdocs.org/user-guide/configuration/#environment-variables repo_url: https://github.com/argoproj/argo-workflows edit_uri: https://github.com/argoproj/argo-workflows/edit/main/docs strict: true diff --git a/test/e2e/hooks_test.go b/test/e2e/hooks_test.go index d10e06cb5491..23cb235ce161 100644 --- a/test/e2e/hooks_test.go +++ b/test/e2e/hooks_test.go @@ -5,6 +5,7 @@ package e2e import ( "strings" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -805,7 +806,7 @@ spec: args: ["sleep", "5"] `).When(). SubmitWorkflow(). - WaitForWorkflow(fixtures.ToBeCompleted). + WaitForWorkflow(fixtures.ToBeCompleted, 2*time.Minute). WaitForWorkflow(fixtures.Condition(func(wf *v1alpha1.Workflow) (bool, string) { onExitNodeName = common.GenerateOnExitNodeName(wf.ObjectMeta.Name) onExitNode := wf.Status.Nodes.FindByDisplayName(onExitNodeName) diff --git a/test/e2e/signals_test.go b/test/e2e/signals_test.go index 132e99a60c49..13a10020b91c 100644 --- a/test/e2e/signals_test.go +++ b/test/e2e/signals_test.go @@ -150,7 +150,7 @@ func (s *SignalsSuite) TestSignaledContainerSet() { Workflow("@testdata/signaled-container-set-workflow.yaml"). When(). SubmitWorkflow(). - WaitForWorkflow(). + WaitForWorkflow(killDuration). Then(). 
ExpectWorkflow(func(t *testing.T, metadata *metav1.ObjectMeta, status *wfv1.WorkflowStatus) { assert.Equal(t, wfv1.WorkflowFailed, status.Phase) diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-creator.tsx b/ui/src/cluster-workflow-templates/cluster-workflow-template-creator.tsx index 1bd88a07f137..df3eb397294d 100644 --- a/ui/src/cluster-workflow-templates/cluster-workflow-template-creator.tsx +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-creator.tsx @@ -8,10 +8,11 @@ import {UploadButton} from '../shared/components/upload-button'; import {exampleClusterWorkflowTemplate} from '../shared/examples'; import {ClusterWorkflowTemplate} from '../shared/models'; import {services} from '../shared/services'; -import {ClusterWorkflowTemplateEditor} from './cluster-workflow-template-editor'; +import {useEditableObject} from '../shared/use-editable-object'; +import {WorkflowTemplateEditor} from '../workflow-templates/workflow-template-editor'; export function ClusterWorkflowTemplateCreator({onCreate}: {onCreate: (workflow: ClusterWorkflowTemplate) => void}) { - const [template, setTemplate] = useState(exampleClusterWorkflowTemplate()); + const {object: template, setObject: setTemplate, serialization, lang, setLang} = useEditableObject(exampleClusterWorkflowTemplate()); const [error, setError] = useState(); return ( <> @@ -31,7 +32,7 @@ export function ClusterWorkflowTemplateCreator({onCreate}: {onCreate: (workflow: - +
.
diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-details.tsx b/ui/src/cluster-workflow-templates/cluster-workflow-template-details.tsx index 9a3b6a96904f..8930097cdd5e 100644 --- a/ui/src/cluster-workflow-templates/cluster-workflow-template-details.tsx +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-details.tsx @@ -18,9 +18,9 @@ import {services} from '../shared/services'; import {useCollectEvent} from '../shared/use-collect-event'; import {useEditableObject} from '../shared/use-editable-object'; import {useQueryParams} from '../shared/use-query-params'; +import {WorkflowTemplateEditor} from '../workflow-templates/workflow-template-editor'; import {SubmitWorkflowPanel} from '../workflows/components/submit-workflow-panel'; import {WorkflowDetailsList} from '../workflows/components/workflow-details-list/workflow-details-list'; -import {ClusterWorkflowTemplateEditor} from './cluster-workflow-template-editor'; import '../workflows/components/workflow-details/workflow-details.scss'; @@ -37,7 +37,7 @@ export function ClusterWorkflowTemplateDetails({history, location, match}: Route const [columns, setColumns] = useState([]); const [error, setError] = useState(); - const [template, edited, setTemplate, resetTemplate] = useEditableObject(); + const {object: template, setObject: setTemplate, resetObject: resetTemplate, serialization, edited, lang, setLang} = useEditableObject(); useEffect( useQueryParams(history, p => { @@ -138,7 +138,16 @@ export function ClusterWorkflowTemplateDetails({history, location, match}: Route {!template ? 
( ) : ( - + )} {template && ( diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-editor.tsx b/ui/src/cluster-workflow-templates/cluster-workflow-template-editor.tsx deleted file mode 100644 index a0241ffea71e..000000000000 --- a/ui/src/cluster-workflow-templates/cluster-workflow-template-editor.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import {Tabs} from 'argo-ui/src/components/tabs/tabs'; -import * as React from 'react'; - -import {LabelsAndAnnotationsEditor} from '../shared/components/editors/labels-and-annotations-editor'; -import {MetadataEditor} from '../shared/components/editors/metadata-editor'; -import {WorkflowParametersEditor} from '../shared/components/editors/workflow-parameters-editor'; -import {ObjectEditor} from '../shared/components/object-editor'; -import {WorkflowTemplate} from '../shared/models'; - -export function ClusterWorkflowTemplateEditor({ - onChange, - template, - onError, - onTabSelected, - selectedTabKey -}: { - template: WorkflowTemplate; - onChange: (template: WorkflowTemplate) => void; - onError: (error: Error) => void; - onTabSelected?: (tab: string) => void; - selectedTabKey?: string; -}) { - return ( - onChange({...x})} /> - }, - { - key: 'spec', - title: 'Spec', - content: onChange({...template, spec})} onError={onError} /> - }, - { - key: 'metadata', - title: 'MetaData', - content: onChange({...template, metadata})} /> - }, - { - key: 'workflow-metadata', - title: 'Workflow MetaData', - content: ( - onChange({...template, spec: {...template.spec, workflowMetadata}})} - /> - ) - } - ]} - /> - ); -} diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-list.scss b/ui/src/cluster-workflow-templates/cluster-workflow-template-list.scss index 2293c1f83a06..72f21557118d 100644 --- a/ui/src/cluster-workflow-templates/cluster-workflow-template-list.scss +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-list.scss @@ -5,4 +5,22 @@ &__row:hover { box-shadow: 1px 2px 3px 
rgba($argo-color-gray-9, .1), 0 0 0 1px rgba($argo-color-teal-5, .5); } -} \ No newline at end of file +} + +.cluster-workflow-templates-list { + padding: 1em; + + &__row-container { + a { + color: $argo-color-gray-6; + } + + a:hover { + color: $argo-color-teal-5; + } + } +} + +.row.pt-60 { + padding-top: 60px; +} diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-list.tsx b/ui/src/cluster-workflow-templates/cluster-workflow-template-list.tsx index 4b03930bad5a..c83f52a9c6e2 100644 --- a/ui/src/cluster-workflow-templates/cluster-workflow-template-list.tsx +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-list.tsx @@ -19,6 +19,7 @@ import {useCollectEvent} from '../shared/use-collect-event'; import {useQueryParams} from '../shared/use-query-params'; import useTimestamp, {TIMESTAMP_KEYS} from '../shared/use-timestamp'; import {ClusterWorkflowTemplateCreator} from './cluster-workflow-template-creator'; +import {ClusterWorkflowTemplateMarkdown} from './cluster-workflow-template-markdown'; import './cluster-workflow-template-list.scss'; @@ -83,15 +84,19 @@ export function ClusterWorkflowTemplateList({history, location}: RouteComponentP {templates.map(t => ( - -
- +
+
+
+ +
+ + + +
+ +
-
{t.metadata.name}
-
- -
- +
))}
diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.scss b/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.scss new file mode 100644 index 000000000000..fca441264981 --- /dev/null +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.scss @@ -0,0 +1,8 @@ +@import 'node_modules/argo-ui/src/styles/config'; + +.wf-rows-name { + line-height: 1.5em; + display: inline-block; + vertical-align: middle; + white-space: pre-line; +} diff --git a/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.tsx b/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.tsx new file mode 100644 index 000000000000..59b0f7041b34 --- /dev/null +++ b/ui/src/cluster-workflow-templates/cluster-workflow-template-markdown.tsx @@ -0,0 +1,22 @@ +import * as React from 'react'; + +import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../shared/annotations'; +import {SuspenseReactMarkdownGfm} from '../shared/components/suspense-react-markdown-gfm'; +import {ClusterWorkflowTemplate} from '../shared/models'; + +require('./cluster-workflow-template-markdown.scss'); + +interface ClusterWorkflowTemplateMarkdownProps { + workflow: ClusterWorkflowTemplate; +} + +export function ClusterWorkflowTemplateMarkdown(props: ClusterWorkflowTemplateMarkdownProps) { + const wf = props.workflow; + // title + description vars + const title = wf.metadata.annotations?.[ANNOTATION_TITLE] ?? wf.metadata.name; + const description = (wf.metadata.annotations?.[ANNOTATION_DESCRIPTION] && `\n${wf.metadata.annotations[ANNOTATION_DESCRIPTION]}`) || ''; + const hasAnnotation = title !== wf.metadata.name || description !== ''; + const markdown = `${title}${description}`; + + return
{hasAnnotation ? : markdown}
; +} diff --git a/ui/src/cron-workflows/cron-workflow-creator.tsx b/ui/src/cron-workflows/cron-workflow-creator.tsx index 095eda7ddeca..bb09fb75fa0d 100644 --- a/ui/src/cron-workflows/cron-workflow-creator.tsx +++ b/ui/src/cron-workflows/cron-workflow-creator.tsx @@ -9,10 +9,11 @@ import {exampleCronWorkflow} from '../shared/examples'; import {CronWorkflow} from '../shared/models'; import * as nsUtils from '../shared/namespaces'; import {services} from '../shared/services'; +import {useEditableObject} from '../shared/use-editable-object'; import {CronWorkflowEditor} from './cron-workflow-editor'; export function CronWorkflowCreator({onCreate, namespace}: {namespace: string; onCreate: (cronWorkflow: CronWorkflow) => void}) { - const [cronWorkflow, setCronWorkflow] = useState(exampleCronWorkflow(nsUtils.getNamespaceWithDefault(namespace))); + const {object: cronWorkflow, setObject: setCronWorkflow, serialization, lang, setLang} = useEditableObject(exampleCronWorkflow(nsUtils.getNamespaceWithDefault(namespace))); const [error, setError] = useState(); return ( <> @@ -32,7 +33,7 @@ export function CronWorkflowCreator({onCreate, namespace}: {namespace: string; o - +

.

diff --git a/ui/src/cron-workflows/cron-workflow-details.tsx b/ui/src/cron-workflows/cron-workflow-details.tsx index ca7eaf1f2ef2..566ccf766a4c 100644 --- a/ui/src/cron-workflows/cron-workflow-details.tsx +++ b/ui/src/cron-workflows/cron-workflow-details.tsx @@ -36,7 +36,7 @@ export function CronWorkflowDetails({match, location, history}: RouteComponentPr const [workflows, setWorkflows] = useState([]); const [columns, setColumns] = useState([]); - const [cronWorkflow, edited, setCronWorkflow, resetCronWorkflow] = useEditableObject(); + const {object: cronWorkflow, setObject: setCronWorkflow, resetObject: resetCronWorkflow, serialization, edited, lang, setLang} = useEditableObject(); const [error, setError] = useState(); useEffect( @@ -207,7 +207,16 @@ export function CronWorkflowDetails({match, location, history}: RouteComponentPr {!cronWorkflow ? ( ) : ( - + )} setSidePanel(null)}> {sidePanel === 'share' && } diff --git a/ui/src/cron-workflows/cron-workflow-editor.tsx b/ui/src/cron-workflows/cron-workflow-editor.tsx index e453157e34e0..fcad11038366 100644 --- a/ui/src/cron-workflows/cron-workflow-editor.tsx +++ b/ui/src/cron-workflows/cron-workflow-editor.tsx @@ -5,6 +5,7 @@ import {LabelsAndAnnotationsEditor} from '../shared/components/editors/labels-an import {MetadataEditor} from '../shared/components/editors/metadata-editor'; import {WorkflowParametersEditor} from '../shared/components/editors/workflow-parameters-editor'; import {ObjectEditor} from '../shared/components/object-editor'; +import type {Lang} from '../shared/components/object-parser'; import {CronWorkflow} from '../shared/models'; import {CronWorkflowSpecEditor} from './cron-workflow-spec-editior'; import {CronWorkflowStatusViewer} from './cron-workflow-status-viewer'; @@ -14,10 +15,16 @@ export function CronWorkflowEditor({ onTabSelected, onError, onChange, - cronWorkflow + onLangChange, + cronWorkflow, + serialization, + lang }: { cronWorkflow: CronWorkflow; - onChange: (cronWorkflow: 
CronWorkflow) => void; + serialization: string; + lang: Lang; + onChange: (cronWorkflow: string | CronWorkflow) => void; + onLangChange: (lang: Lang) => void; onError: (error: Error) => void; onTabSelected?: (tab: string) => void; selectedTabKey?: string; @@ -41,7 +48,16 @@ export function CronWorkflowEditor({ { key: 'manifest', title: 'Manifest', - content: onChange({...x})} /> + content: ( + + ) }, { key: 'cron', diff --git a/ui/src/cron-workflows/cron-workflow-list.scss b/ui/src/cron-workflows/cron-workflow-list.scss index e9dd2862a52b..3c8d9f7edd2c 100644 --- a/ui/src/cron-workflows/cron-workflow-list.scss +++ b/ui/src/cron-workflows/cron-workflow-list.scss @@ -4,4 +4,22 @@ &__row:hover { box-shadow: 1px 2px 3px rgba($argo-color-gray-9, .1), 0 0 0 1px rgba($argo-color-teal-5, .5); } -} \ No newline at end of file +} + +.cron-workflows-list { + padding: 1em; + + &__row-container { + a { + color: $argo-color-gray-6; + } + + a:hover { + color: $argo-color-teal-5; + } + } +} + +.row.pt-60 { + padding-top: 60px; +} diff --git a/ui/src/cron-workflows/cron-workflow-list.tsx b/ui/src/cron-workflows/cron-workflow-list.tsx index 4ff8ed3ff647..dbbd6ba27967 100644 --- a/ui/src/cron-workflows/cron-workflow-list.tsx +++ b/ui/src/cron-workflows/cron-workflow-list.tsx @@ -1,23 +1,20 @@ import {Page} from 'argo-ui/src/components/page/page'; import {SlidingPanel} from 'argo-ui/src/components/sliding-panel/sliding-panel'; -import {Ticker} from 'argo-ui/src/components/ticker'; import * as React from 'react'; import {useContext, useEffect, useState} from 'react'; -import {Link, RouteComponentProps} from 'react-router-dom'; +import {RouteComponentProps} from 'react-router-dom'; -import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../shared/annotations'; import {uiUrl} from '../shared/base'; import {ErrorNotice} from '../shared/components/error-notice'; import {ExampleManifests} from '../shared/components/example-manifests'; import {InfoIcon} from '../shared/components/fa-icons'; 
import {Loading} from '../shared/components/loading'; -import {Timestamp, TimestampSwitch} from '../shared/components/timestamp'; +import {TimestampSwitch} from '../shared/components/timestamp'; import {ZeroState} from '../shared/components/zero-state'; import {Context} from '../shared/context'; -import {getNextScheduledTime} from '../shared/cron'; import {Footnote} from '../shared/footnote'; import {historyUrl} from '../shared/history'; -import {CronWorkflow, CronWorkflowSpec} from '../shared/models'; +import {CronWorkflow} from '../shared/models'; import * as nsUtils from '../shared/namespaces'; import {services} from '../shared/services'; import {useCollectEvent} from '../shared/use-collect-event'; @@ -25,7 +22,7 @@ import {useQueryParams} from '../shared/use-query-params'; import useTimestamp, {TIMESTAMP_KEYS} from '../shared/use-timestamp'; import {CronWorkflowCreator} from './cron-workflow-creator'; import {CronWorkflowFilters} from './cron-workflow-filters'; -import {PrettySchedule} from './pretty-schedule'; +import {CronWorkflowRow} from './cron-workflow-row'; import './cron-workflow-list.scss'; @@ -156,56 +153,16 @@ export function CronWorkflowList({match, location, history}: RouteComponentProps /> - {cronWorkflows.map(w => ( - -
{w.spec.suspend ? : }
-
- {w.metadata.annotations?.[ANNOTATION_TITLE] ?? w.metadata.name} - {w.metadata.annotations?.[ANNOTATION_DESCRIPTION] ?

{w.metadata.annotations[ANNOTATION_DESCRIPTION]}

: null} -
-
{w.metadata.namespace}
-
{w.spec.timezone}
-
- {w.spec.schedule - ? w.spec.schedule - : w.spec.schedules.map(schedule => ( - <> - {schedule} -
- - ))} -
-
- {w.spec.schedule ? ( - - ) : ( - <> - {w.spec.schedules.map(schedule => ( - <> - -
- - ))} - - )} -
-
- -
-
- {w.spec.suspend ? ( - '' - ) : ( - - {() => } - - )} -
- - ))} + {cronWorkflows.map(w => { + return ( + + ); + })} Cron workflows are workflows that run on a preset schedule. Next scheduled run assumes workflow-controller is in UTC.{' '} @@ -221,18 +178,3 @@ export function CronWorkflowList({match, location, history}: RouteComponentProps ); } - -function getSpecNextScheduledTime(spec: CronWorkflowSpec): Date { - if (spec.schedule) { - return getNextScheduledTime(spec.schedule, spec.timezone); - } - - let out: Date; - spec.schedules.forEach(schedule => { - const next = getNextScheduledTime(schedule, spec.timezone); - if (!out || next.getTime() < out.getTime()) { - out = next; - } - }); - return out; -} diff --git a/ui/src/cron-workflows/cron-workflow-row.scss b/ui/src/cron-workflows/cron-workflow-row.scss new file mode 100644 index 000000000000..fca441264981 --- /dev/null +++ b/ui/src/cron-workflows/cron-workflow-row.scss @@ -0,0 +1,8 @@ +@import 'node_modules/argo-ui/src/styles/config'; + +.wf-rows-name { + line-height: 1.5em; + display: inline-block; + vertical-align: middle; + white-space: pre-line; +} diff --git a/ui/src/cron-workflows/cron-workflow-row.tsx b/ui/src/cron-workflows/cron-workflow-row.tsx new file mode 100644 index 000000000000..d1fb5080a5ad --- /dev/null +++ b/ui/src/cron-workflows/cron-workflow-row.tsx @@ -0,0 +1,90 @@ +import {Ticker} from 'argo-ui/src/index'; +import * as React from 'react'; +import {Link} from 'react-router-dom'; + +import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../shared/annotations'; +import {uiUrl} from '../shared/base'; +import {SuspenseReactMarkdownGfm} from '../shared/components/suspense-react-markdown-gfm'; +import {Timestamp} from '../shared/components/timestamp'; +import {getNextScheduledTime} from '../shared/cron'; +import {CronWorkflow, CronWorkflowSpec} from '../shared/models'; +import {PrettySchedule} from './pretty-schedule'; + +require('./cron-workflow-row.scss'); + +interface CronWorkflowRowProps { + workflow: CronWorkflow; + displayISOFormatCreation: 
boolean; + displayISOFormatNextScheduled: boolean; +} + +export function CronWorkflowRow(props: CronWorkflowRowProps) { + const wf = props.workflow; + // title + description vars + const title = wf.metadata.annotations?.[ANNOTATION_TITLE] ?? wf.metadata.name; + const description = (wf.metadata.annotations?.[ANNOTATION_DESCRIPTION] && `\n${wf.metadata.annotations[ANNOTATION_DESCRIPTION]}`) || ''; + const hasAnnotation = title !== wf.metadata.name || description !== ''; + const markdown = `${title}${description}`; + + return ( +
+
+
{wf.spec.suspend ? : }
+ +
{hasAnnotation ? : markdown}
+ +
{wf.metadata.namespace}
+
{wf.spec.timezone}
+
+ {wf.spec.schedule != '' + ? wf.spec.schedule + : wf.spec.schedules.map(schedule => ( + <> + {schedule} +
+ + ))} +
+
+ {wf.spec.schedule != '' ? ( + + ) : ( + <> + {wf.spec.schedules.map(schedule => ( + <> + +
+ + ))} + + )} +
+
+ +
+
+ {wf.spec.suspend ? ( + '' + ) : ( + {() => } + )} +
+
+
+ ); +} + +function getCronNextScheduledTime(spec: CronWorkflowSpec): Date { + if (spec.schedule != '') { + return getNextScheduledTime(spec.schedule, spec.timezone); + } + + let out: Date; + spec.schedules.forEach(schedule => { + const next = getNextScheduledTime(schedule, spec.timezone); + if (!out || next.getTime() < out.getTime()) { + out = next; + } + }); + return out; +} diff --git a/ui/src/event-flow/event-flow-page.tsx b/ui/src/event-flow/event-flow-page.tsx index 95c74fe8a191..bdbb07e9cf29 100644 --- a/ui/src/event-flow/event-flow-page.tsx +++ b/ui/src/event-flow/event-flow-page.tsx @@ -15,7 +15,7 @@ import {GraphPanel} from '../shared/components/graph/graph-panel'; import {Node} from '../shared/components/graph/types'; import {Links} from '../shared/components/links'; import {NamespaceFilter} from '../shared/components/namespace-filter'; -import {ResourceEditor} from '../shared/components/resource-editor/resource-editor'; +import {SerializingObjectEditor} from '../shared/components/object-editor'; import {ZeroState} from '../shared/components/zero-state'; import {Context} from '../shared/context'; import {Footnote} from '../shared/footnote'; @@ -317,7 +317,7 @@ export function EventFlowPage({history, location, match}: RouteComponentProps + content: }, { title: 'LOGS', diff --git a/ui/src/event-sources/event-source-creator.tsx b/ui/src/event-sources/event-source-creator.tsx index aff598faf78a..6fc89e4c2580 100644 --- a/ui/src/event-sources/event-source-creator.tsx +++ b/ui/src/event-sources/event-source-creator.tsx @@ -8,10 +8,11 @@ import {exampleEventSource} from '../shared/examples'; import {EventSource} from '../shared/models'; import * as nsUtils from '../shared/namespaces'; import {services} from '../shared/services'; +import {useEditableObject} from '../shared/use-editable-object'; import {EventSourceEditor} from './event-source-editor'; export function EventSourceCreator({onCreate, namespace}: {namespace: string; onCreate: (eventSource: 
EventSource) => void}) { - const [eventSource, setEventSource] = useState(exampleEventSource(nsUtils.getNamespaceWithDefault(namespace))); + const {object: eventSource, setObject: setEventSource, serialization, lang, setLang} = useEditableObject(exampleEventSource(nsUtils.getNamespaceWithDefault(namespace))); const [error, setError] = useState(); return ( <> @@ -31,7 +32,7 @@ export function EventSourceCreator({onCreate, namespace}: {namespace: string; on - +

Example event sources diff --git a/ui/src/event-sources/event-source-details.tsx b/ui/src/event-sources/event-source-details.tsx index a31d5ba66451..0b6cbc1a17a0 100644 --- a/ui/src/event-sources/event-source-details.tsx +++ b/ui/src/event-sources/event-source-details.tsx @@ -54,7 +54,7 @@ export function EventSourceDetails({history, location, match}: RouteComponentPro ); const [error, setError] = useState(); - const [eventSource, edited, setEventSource, resetEventSource] = useEditableObject(); + const {object: eventSource, setObject: setEventSource, resetObject: resetEventSource, serialization, edited, lang, setLang} = useEditableObject(); const selected = (() => { if (!selectedNode) { @@ -139,7 +139,16 @@ export function EventSourceDetails({history, location, match}: RouteComponentPro {!eventSource ? ( ) : ( - + )} setSelectedNode(null)}> diff --git a/ui/src/event-sources/event-source-editor.tsx b/ui/src/event-sources/event-source-editor.tsx index a5310d3d2bb5..f05ea6c4801d 100644 --- a/ui/src/event-sources/event-source-editor.tsx +++ b/ui/src/event-sources/event-source-editor.tsx @@ -3,16 +3,23 @@ import * as React from 'react'; import {MetadataEditor} from '../shared/components/editors/metadata-editor'; import {ObjectEditor} from '../shared/components/object-editor'; +import type {Lang} from '../shared/components/object-parser'; import {EventSource} from '../shared/models'; export function EventSourceEditor({ onChange, + onLangChange, onTabSelected, selectedTabKey, - eventSource + eventSource, + serialization, + lang }: { eventSource: EventSource; - onChange: (template: EventSource) => void; + serialization: string; + lang: Lang; + onChange: (template: string | EventSource) => void; + onLangChange: (lang: Lang) => void; onError: (error: Error) => void; onTabSelected?: (tab: string) => void; selectedTabKey?: string; @@ -27,7 +34,16 @@ export function EventSourceEditor({ { key: 'manifest', title: 'Manifest', - content: onChange({...x})} /> + content: ( + + ) }, { 
diff --git a/ui/src/sensors/sensor-creator.tsx b/ui/src/sensors/sensor-creator.tsx index 25d2a0950e7b..8de396a92d86 100644 --- a/ui/src/sensors/sensor-creator.tsx +++ b/ui/src/sensors/sensor-creator.tsx @@ -8,10 +8,11 @@ import {exampleSensor} from '../shared/examples'; import {Sensor} from '../shared/models'; import * as nsUtils from '../shared/namespaces'; import {services} from '../shared/services'; +import {useEditableObject} from '../shared/use-editable-object'; import {SensorEditor} from './sensor-editor'; export function SensorCreator({namespace, onCreate}: {namespace: string; onCreate: (sensor: Sensor) => void}) { - const [sensor, setSensor] = useState(exampleSensor(nsUtils.getNamespaceWithDefault(namespace))); + const {object: sensor, setObject: setSensor, serialization, lang, setLang} = useEditableObject(exampleSensor(nsUtils.getNamespaceWithDefault(namespace))); const [error, setError] = useState(); return ( <> @@ -31,7 +32,7 @@ export function SensorCreator({namespace, onCreate}: {namespace: string; onCreat - +

Example sensors diff --git a/ui/src/sensors/sensor-details.tsx b/ui/src/sensors/sensor-details.tsx index e4388bb760a9..e8919dc113ee 100644 --- a/ui/src/sensors/sensor-details.tsx +++ b/ui/src/sensors/sensor-details.tsx @@ -30,7 +30,7 @@ export function SensorDetails({match, location, history}: RouteComponentProps(queryParams.get('tab')); - const [sensor, edited, setSensor, resetSensor] = useEditableObject(); + const {object: sensor, setObject: setSensor, resetObject: resetSensor, serialization, edited, lang, setLang} = useEditableObject(); const [selectedLogNode, setSelectedLogNode] = useState(queryParams.get('selectedLogNode')); const [error, setError] = useState(); @@ -125,7 +125,20 @@ export function SensorDetails({match, location, history}: RouteComponentProps <> - {!sensor ? : } + {!sensor ? ( + + ) : ( + + )} {!!selectedLogNode && ( void; + serialization: string; + lang: Lang; + onChange: (template: string | Sensor) => void; + onLangChange: (lang: Lang) => void; onError: (error: Error) => void; onTabSelected?: (tab: string) => void; selectedTabKey?: string; @@ -27,7 +34,9 @@ export function SensorEditor({ { key: 'manifest', title: 'Manifest', - content: onChange({...x})} /> + content: ( + + ) }, { key: 'metadata', diff --git a/ui/src/shared/components/_react-markdown-gfm.tsx b/ui/src/shared/components/_react-markdown-gfm.tsx new file mode 100644 index 000000000000..f0d2c1e9eae1 --- /dev/null +++ b/ui/src/shared/components/_react-markdown-gfm.tsx @@ -0,0 +1,27 @@ +// Use the lazy loaded suspense variant instead +import React from 'react'; +import ReactMarkdown from 'react-markdown'; +import remarkGfm from 'remark-gfm'; + +import {openLinkWithKey} from './links'; + +export function ReactMarkdownGfm({markdown}: {markdown: string}) { + return ( + + {markdown} + + ); +} +export default ReactMarkdownGfm; // for lazy loading + +function NestedAnchor(props: React.ComponentProps<'a'>) { + return ( + { + ev.preventDefault(); // don't bubble up + 
openLinkWithKey(props.href); // eslint-disable-line react/prop-types -- it's not interpreting the prop types correctly + }} + /> + ); +} diff --git a/ui/src/shared/components/linkified-text.tsx b/ui/src/shared/components/linkified-text.tsx index 233bbd6ba5a7..ca8860791920 100644 --- a/ui/src/shared/components/linkified-text.tsx +++ b/ui/src/shared/components/linkified-text.tsx @@ -33,5 +33,5 @@ export default function LinkifiedText({text}: Props) { parts.push({text.slice(lastIndex)}); } - return <>{parts}; + return {parts}; } diff --git a/ui/src/shared/components/object-editor.tsx b/ui/src/shared/components/object-editor.tsx index ea79be418e83..c9c3cb37bbd5 100644 --- a/ui/src/shared/components/object-editor.tsx +++ b/ui/src/shared/components/object-editor.tsx @@ -3,44 +3,31 @@ import {useEffect, useRef, useState} from 'react'; import MonacoEditor from 'react-monaco-editor'; import {uiUrl} from '../base'; -import {ScopedLocalStorage} from '../scoped-local-storage'; +import {useEditableObject} from '../use-editable-object'; import {Button} from './button'; -import {parse, stringify} from './object-parser'; +import type {Lang} from './object-parser'; import {PhaseIcon} from './phase-icon'; import {SuspenseMonacoEditor} from './suspense-monaco-editor'; interface Props { type?: string; value: T; - buttons?: React.ReactNode; - onChange?: (value: T) => void; + lang: Lang; + text: string; + onLangChange: (lang: Lang) => void; + onChange?: (value: string) => void; } -const defaultLang = 'yaml'; - -export function ObjectEditor({type, value, buttons, onChange}: Props) { - const storage = new ScopedLocalStorage('object-editor'); +export function ObjectEditor({type, value, text, lang, onChange, onLangChange}: Props) { const [error, setError] = useState(); - const [lang, setLang] = useState(storage.getItem('lang', defaultLang)); - const [text, setText] = useState(stringify(value, lang)); const editor = useRef(null); - useEffect(() => storage.setItem('lang', lang, defaultLang), 
[lang]); - useEffect(() => setText(stringify(value, lang)), [value]); - useEffect(() => setText(stringify(parse(text), lang)), [lang]); useEffect(() => { - if (!editor.current) { + if (!editor.current || text === editor.current.editor.getValue()) { return; } - - // we ONLY want to change the text, if the normalized version has changed, this prevents white-space changes - // from resulting in a significant change - const editorText = stringify(parse(editor.current.editor.getValue()), lang); - const editorLang = editor.current.editor.getValue().startsWith('{') ? 'json' : 'yaml'; - if (text !== editorText || lang !== editorLang) { - editor.current.editor.setValue(stringify(parse(text), lang)); - } - }, [editor, text, lang]); + editor.current.editor.setValue(text); + }, [editor, text]); useEffect(() => { if (!type || lang !== 'json') { @@ -83,7 +70,7 @@ export function ObjectEditor({type, value, buttons, onChange}: Props) { return ( <>

({type, value, buttons, onChange}: Props) { onChange={v => { if (onChange) { try { - onChange(parse(v)); + onChange(v); setError(null); } catch (e) { setError(e); @@ -153,3 +139,9 @@ export function ObjectEditor({type, value, buttons, onChange}: Props) { ); } + +/** Wrapper for ObjectEditor that automatically handles serializing/deserializing the object using useEditableObject() */ +export function SerializingObjectEditor({type, value}: {type?: string; value: T}) { + const {object, setObject, serialization, lang, setLang} = useEditableObject(value); + return ; +} diff --git a/ui/src/shared/components/object-parser.test.ts b/ui/src/shared/components/object-parser.test.ts index f787f293d5ff..357f0e0caa41 100644 --- a/ui/src/shared/components/object-parser.test.ts +++ b/ui/src/shared/components/object-parser.test.ts @@ -1,6 +1,15 @@ import {exampleWorkflowTemplate} from '../examples'; import {parse, stringify} from './object-parser'; +describe('parse and stringify are inverses', () => { + it('handles creationTimestamp', () => { + const testWorkflowTemplate = exampleWorkflowTemplate('test'); + testWorkflowTemplate.metadata.creationTimestamp = '2024-11-17T06:56:52Z'; + expect(parse(stringify(testWorkflowTemplate, 'yaml'))).toEqual(testWorkflowTemplate); + expect(parse(stringify(testWorkflowTemplate, 'json'))).toEqual(testWorkflowTemplate); + }); +}); + describe('parse', () => { it('handles a valid JSON string', () => { expect(parse('{}')).toEqual({}); diff --git a/ui/src/shared/components/object-parser.ts b/ui/src/shared/components/object-parser.ts index 7b4800abf660..c0c3ceaa85f3 100644 --- a/ui/src/shared/components/object-parser.ts +++ b/ui/src/shared/components/object-parser.ts @@ -1,17 +1,18 @@ import YAML from 'yaml'; +export type Lang = 'json' | 'yaml'; + +// Default is YAML 1.2, but Kubernetes uses YAML 1.1, which leads to subtle bugs. 
+// See https://github.com/argoproj/argo-workflows/issues/12205#issuecomment-2111572189 +const yamlVersion = '1.1'; + export function parse(value: string): T { if (value.startsWith('{')) { return JSON.parse(value); } - return YAML.parse(value, { - // Default is YAML 1.2, but Kubernetes uses YAML 1.1, which leads to subtle bugs. - // See https://github.com/argoproj/argo-workflows/issues/12205#issuecomment-2111572189 - version: '1.1', - strict: false - }) as T; + return YAML.parse(value, {version: yamlVersion, strict: false}) as T; } -export function stringify(value: T, type: string) { - return type === 'yaml' ? YAML.stringify(value, {aliasDuplicateObjects: false}) : JSON.stringify(value, null, ' '); +export function stringify(value: T, type: Lang) { + return type === 'yaml' ? YAML.stringify(value, {aliasDuplicateObjects: false, version: yamlVersion, strict: false}) : JSON.stringify(value, null, ' '); } diff --git a/ui/src/shared/components/resource-editor/resource-editor.tsx b/ui/src/shared/components/resource-editor/resource-editor.tsx deleted file mode 100644 index 52f7132c067a..000000000000 --- a/ui/src/shared/components/resource-editor/resource-editor.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import * as kubernetes from 'argo-ui/src/models/kubernetes'; -import * as React from 'react'; -import {useState} from 'react'; - -import {Button} from '../button'; -import {ErrorNotice} from '../error-notice'; -import {ObjectEditor} from '../object-editor'; -import {UploadButton} from '../upload-button'; - -interface Props { - kind?: string; - upload?: boolean; - namespace?: string; - title?: string; - value: T; - editing?: boolean; - onSubmit?: (value: T) => Promise; -} - -export function ResourceEditor(props: Props) { - const [editing, setEditing] = useState(props.editing); - const [value, setValue] = useState(props.value); - const [error, setError] = useState(); - - async function submit() { - try { - if (value.metadata && !value.metadata.namespace && props.namespace) { - 
value.metadata.namespace = props.namespace; - } - await props.onSubmit(value); - setError(null); - } catch (newError) { - setError(newError); - } - } - - return ( - <> - {props.title &&

{props.title}

} - - - {editing ? ( - <> - {props.upload && onUpload={setValue} onError={setError} />} - {props.onSubmit && ( - - )} - - ) : ( - props.onSubmit && ( - - ) - )} - - } - onChange={setValue} - /> - - ); -} diff --git a/ui/src/shared/components/resource-editor/resource.scss b/ui/src/shared/components/resource-editor/resource.scss deleted file mode 100644 index 71ffef0c5a57..000000000000 --- a/ui/src/shared/components/resource-editor/resource.scss +++ /dev/null @@ -1,20 +0,0 @@ -@import 'node_modules/argo-ui/src/styles/config'; - -.resource { - padding-top: 1em; - white-space: pre; - color: $argo-color-gray-7; - width: 100%; - min-height: 300px; -} - -.resource-editor-panel { - &__editor { - border: 1px solid $argo-color-gray-3; - border-radius: 5px; - margin-top: 1em; - padding-bottom: 1em; - overflow: hidden; - } -} - diff --git a/ui/src/shared/components/suspense-react-markdown-gfm.tsx b/ui/src/shared/components/suspense-react-markdown-gfm.tsx new file mode 100644 index 000000000000..6de943c8ebbb --- /dev/null +++ b/ui/src/shared/components/suspense-react-markdown-gfm.tsx @@ -0,0 +1,16 @@ +import React from 'react'; + +import {Loading} from './loading'; + +// lazy load ReactMarkdown (and remark-gfm) as it is a large optional component (which can be split into a separate bundle) +const LazyReactMarkdownGfm = React.lazy(() => { + return import(/* webpackChunkName: "react-markdown-plus-gfm" */ './_react-markdown-gfm'); +}); + +export function SuspenseReactMarkdownGfm(props: {markdown: string}) { + return ( + }> + + + ); +} diff --git a/ui/src/shared/use-editable-object.test.ts b/ui/src/shared/use-editable-object.test.ts new file mode 100644 index 000000000000..11acd670927a --- /dev/null +++ b/ui/src/shared/use-editable-object.test.ts @@ -0,0 +1,105 @@ +import {createInitialState, reducer} from './use-editable-object'; + +describe('createInitialState', () => { + test('without object', () => { + expect(createInitialState()).toEqual({ + object: undefined, + serialization: 
null, + lang: 'yaml', + initialSerialization: null, + edited: false + }); + }); + + test('with object', () => { + expect(createInitialState({a: 1})).toEqual({ + object: {a: 1}, + serialization: 'a: 1\n', + lang: 'yaml', + initialSerialization: 'a: 1\n', + edited: false + }); + }); +}); + +describe('reducer', () => { + const testState = { + object: {a: 1}, + serialization: 'a: 1\n', + lang: 'yaml', + initialSerialization: 'a: 1\n', + edited: false + } as const; + + test('setLang unedited', () => { + const newState = reducer(testState, {type: 'setLang', payload: 'json'}); + expect(newState).toEqual({ + object: {a: 1}, + serialization: '{\n "a": 1\n}', + lang: 'json', + initialSerialization: '{\n "a": 1\n}', + edited: false + }); + }); + + test('setLang edited', () => { + const newState = reducer( + { + ...testState, + edited: true + }, + {type: 'setLang', payload: 'json'} + ); + expect(newState).toEqual({ + object: {a: 1}, + serialization: '{\n "a": 1\n}', + lang: 'json', + initialSerialization: 'a: 1\n', + edited: true + }); + }); + + test('setObject with string', () => { + const newState = reducer(testState, {type: 'setObject', payload: 'a: 2'}); + expect(newState).toEqual({ + object: {a: 2}, + serialization: 'a: 2', + lang: 'yaml', + initialSerialization: 'a: 1\n', + edited: true + }); + }); + + test('setObject with object', () => { + const newState = reducer(testState, {type: 'setObject', payload: {a: 2}}); + expect(newState).toEqual({ + object: {a: 2}, + serialization: 'a: 2\n', + lang: 'yaml', + initialSerialization: 'a: 1\n', + edited: true + }); + }); + + test('resetObject with string', () => { + const newState = reducer(testState, {type: 'resetObject', payload: 'a: 2'}); + expect(newState).toEqual({ + object: {a: 2}, + serialization: 'a: 2', + lang: 'yaml', + initialSerialization: 'a: 2', + edited: false + }); + }); + + test('resetObject with object', () => { + const newState = reducer(testState, {type: 'resetObject', payload: {a: 2}}); + 
expect(newState).toEqual({ + object: {a: 2}, + serialization: 'a: 2\n', + lang: 'yaml', + initialSerialization: 'a: 2\n', + edited: false + }); + }); +}); diff --git a/ui/src/shared/use-editable-object.ts b/ui/src/shared/use-editable-object.ts index bb18287af8a8..a04d3174b4ca 100644 --- a/ui/src/shared/use-editable-object.ts +++ b/ui/src/shared/use-editable-object.ts @@ -1,21 +1,80 @@ -import {useState} from 'react'; +import {useReducer} from 'react'; -/** - * useEditableObject is a React hook to manage the state of object that can be edited and updated. - * Uses ref comparisons to determine whether the resource has been edited. - */ -export function useEditableObject(initial?: T): [T, boolean, React.Dispatch, (value: T) => void] { - const [value, setValue] = useState(initial); - const [initialValue, setInitialValue] = useState(initial); +import {Lang, parse, stringify} from '../shared/components/object-parser'; +import {ScopedLocalStorage} from '../shared/scoped-local-storage'; + +type Action = {type: 'setLang'; payload: Lang} | {type: 'setObject'; payload: string | T} | {type: 'resetObject'; payload: string | T}; + +interface State { + /** The parsed form of the object, kept in sync with "serialization" */ + object: T; + /** The stringified form of the object, kept in sync with "object" */ + serialization: string; + /** The serialization language used (YAML or JSON) */ + lang: Lang; + /** The initial stringified form of the object. Used to check if it was edited */ + initialSerialization: string; + /** Whether any changes have been made */ + edited: boolean; +} - // Note: This is a pure reference comparison instead of a deep comparison for performance - // reasons, since changes are latency-sensitive. 
- const edited = value !== initialValue; +const defaultLang = 'yaml'; +const storage = new ScopedLocalStorage('object-editor'); - function resetValue(value: T) { - setValue(value); - setInitialValue(value); +export function reducer(state: State, action: Action) { + const newState = {...state}; + switch (action.type) { + case 'resetObject': + case 'setObject': + if (typeof action.payload === 'string') { + newState.object = parse(action.payload); + newState.serialization = action.payload; + } else { + newState.object = action.payload; + newState.serialization = stringify(action.payload, newState.lang); + } + if (action.type === 'resetObject') { + newState.initialSerialization = newState.serialization; + } + newState.edited = newState.initialSerialization !== newState.serialization; + return newState; + case 'setLang': + newState.lang = action.payload; + storage.setItem('lang', newState.lang, defaultLang); + newState.serialization = stringify(newState.object, newState.lang); + if (!newState.edited) { + newState.initialSerialization = newState.serialization; + } + return newState; } +} + +export function createInitialState(object?: T): State { + const lang = storage.getItem('lang', defaultLang); + const serialization = object ? stringify(object, lang) : null; + return { + object, + serialization, + lang, + initialSerialization: serialization, + edited: false + }; +} - return [value, edited, setValue, resetValue]; +/** + * useEditableObject is a React hook to manage the state of an object that can be serialized and edited, encapsulating the logic to + * parse/stringify the object as necessary. 
+ */ +export function useEditableObject(object?: T): State & { + setObject: (value: string | T) => void; + resetObject: (value: string | T) => void; + setLang: (lang: Lang) => void; +} { + const [state, dispatch] = useReducer(reducer, object, createInitialState); + return { + ...state, + setObject: value => dispatch({type: 'setObject', payload: value}), + resetObject: value => dispatch({type: 'resetObject', payload: value}), + setLang: value => dispatch({type: 'setLang', payload: value}) + }; } diff --git a/ui/src/workflow-event-bindings/workflow-event-bindings.tsx b/ui/src/workflow-event-bindings/workflow-event-bindings.tsx index be0ca33793d2..b3556c41bfca 100644 --- a/ui/src/workflow-event-bindings/workflow-event-bindings.tsx +++ b/ui/src/workflow-event-bindings/workflow-event-bindings.tsx @@ -11,7 +11,7 @@ import {GraphPanel} from '../shared/components/graph/graph-panel'; import {Graph} from '../shared/components/graph/types'; import {Loading} from '../shared/components/loading'; import {NamespaceFilter} from '../shared/components/namespace-filter'; -import {ResourceEditor} from '../shared/components/resource-editor/resource-editor'; +import {SerializingObjectEditor} from '../shared/components/object-editor'; import {ZeroState} from '../shared/components/zero-state'; import {Context} from '../shared/context'; import {Footnote} from '../shared/footnote'; @@ -147,7 +147,7 @@ export function WorkflowEventBindings({match, location, history}: RouteComponent {introductionText} {learnMore}. 
setSelectedWorkflowEventBinding(null)}> - {selected && } + {selected && } )} diff --git a/ui/src/workflow-templates/workflow-template-creator.tsx b/ui/src/workflow-templates/workflow-template-creator.tsx index 0e8c33001999..34a3f7cecdd7 100644 --- a/ui/src/workflow-templates/workflow-template-creator.tsx +++ b/ui/src/workflow-templates/workflow-template-creator.tsx @@ -9,10 +9,11 @@ import {exampleWorkflowTemplate} from '../shared/examples'; import {WorkflowTemplate} from '../shared/models'; import * as nsUtils from '../shared/namespaces'; import {services} from '../shared/services'; +import {useEditableObject} from '../shared/use-editable-object'; import {WorkflowTemplateEditor} from './workflow-template-editor'; export function WorkflowTemplateCreator({namespace, onCreate}: {namespace: string; onCreate: (workflow: WorkflowTemplate) => void}) { - const [template, setTemplate] = useState(exampleWorkflowTemplate(nsUtils.getNamespaceWithDefault(namespace))); + const {object: template, setObject: setTemplate, serialization, lang, setLang} = useEditableObject(exampleWorkflowTemplate(nsUtils.getNamespaceWithDefault(namespace))); const [error, setError] = useState(); return ( <> @@ -32,7 +33,7 @@ export function WorkflowTemplateCreator({namespace, onCreate}: {namespace: strin
- +

.

diff --git a/ui/src/workflow-templates/workflow-template-details.tsx b/ui/src/workflow-templates/workflow-template-details.tsx index 8a8d869c3050..cdf3bd0778a2 100644 --- a/ui/src/workflow-templates/workflow-template-details.tsx +++ b/ui/src/workflow-templates/workflow-template-details.tsx @@ -34,8 +34,7 @@ export function WorkflowTemplateDetails({history, location, match}: RouteCompone const [tab, setTab] = useState(queryParams.get('tab')); const [workflows, setWorkflows] = useState([]); const [columns, setColumns] = useState([]); - - const [template, edited, setTemplate, resetTemplate] = useEditableObject(); + const {object: template, setObject: setTemplate, resetObject: resetTemplate, serialization, edited, lang, setLang} = useEditableObject(); const [error, setError] = useState(); useEffect( @@ -133,7 +132,20 @@ export function WorkflowTemplateDetails({history, location, match}: RouteCompone }}> <> - {!template ? : } + {!template ? ( + + ) : ( + + )} {template && ( setSidePanel(null)} isMiddle={sidePanel === 'submit'}> diff --git a/ui/src/workflow-templates/workflow-template-editor.tsx b/ui/src/workflow-templates/workflow-template-editor.tsx index c273df8ccc18..83a23e64450d 100644 --- a/ui/src/workflow-templates/workflow-template-editor.tsx +++ b/ui/src/workflow-templates/workflow-template-editor.tsx @@ -5,19 +5,26 @@ import {LabelsAndAnnotationsEditor} from '../shared/components/editors/labels-an import {MetadataEditor} from '../shared/components/editors/metadata-editor'; import {WorkflowParametersEditor} from '../shared/components/editors/workflow-parameters-editor'; import {ObjectEditor} from '../shared/components/object-editor'; +import type {Lang} from '../shared/components/object-parser'; import {WorkflowTemplate} from '../shared/models'; export function WorkflowTemplateEditor({ onChange, + onLangChange, onError, onTabSelected, selectedTabKey, - template + template, + serialization, + lang }: { template: WorkflowTemplate; - onChange: (template: 
WorkflowTemplate) => void; + serialization: string; + lang: Lang; + onChange: (template: string | WorkflowTemplate) => void; onError: (error: Error) => void; onTabSelected?: (tab: string) => void; + onLangChange: (lang: Lang) => void; selectedTabKey?: string; }) { return ( @@ -30,7 +37,16 @@ export function WorkflowTemplateEditor({ { key: 'manifest', title: 'Manifest', - content: onChange({...x})} /> + content: ( + + ) }, { key: 'spec', diff --git a/ui/src/workflow-templates/workflow-template-list.scss b/ui/src/workflow-templates/workflow-template-list.scss index 2293c1f83a06..29bc980980ce 100644 --- a/ui/src/workflow-templates/workflow-template-list.scss +++ b/ui/src/workflow-templates/workflow-template-list.scss @@ -5,4 +5,22 @@ &__row:hover { box-shadow: 1px 2px 3px rgba($argo-color-gray-9, .1), 0 0 0 1px rgba($argo-color-teal-5, .5); } -} \ No newline at end of file +} + +.workflow-templates-list { + padding: 1em; + + &__row-container { + a { + color: $argo-color-gray-6; + } + + a:hover { + color: $argo-color-teal-5; + } + } +} + +.row.pt-60 { + padding-top: 60px; +} diff --git a/ui/src/workflow-templates/workflow-template-list.tsx b/ui/src/workflow-templates/workflow-template-list.tsx index 6a9820b3572d..b7d4c1e23b98 100644 --- a/ui/src/workflow-templates/workflow-template-list.tsx +++ b/ui/src/workflow-templates/workflow-template-list.tsx @@ -2,16 +2,15 @@ import {Page} from 'argo-ui/src/components/page/page'; import {SlidingPanel} from 'argo-ui/src/components/sliding-panel/sliding-panel'; import * as React from 'react'; import {useContext, useEffect, useState} from 'react'; -import {Link, RouteComponentProps} from 'react-router-dom'; +import {RouteComponentProps} from 'react-router-dom'; -import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../shared/annotations'; import {uiUrl} from '../shared/base'; import {ErrorNotice} from '../shared/components/error-notice'; import {ExampleManifests} from '../shared/components/example-manifests'; import {InfoIcon} 
from '../shared/components/fa-icons'; import {Loading} from '../shared/components/loading'; import {PaginationPanel} from '../shared/components/pagination-panel'; -import {Timestamp, TimestampSwitch} from '../shared/components/timestamp'; +import {TimestampSwitch} from '../shared/components/timestamp'; import {ZeroState} from '../shared/components/zero-state'; import {Context} from '../shared/context'; import {Footnote} from '../shared/footnote'; @@ -26,6 +25,7 @@ import {useQueryParams} from '../shared/use-query-params'; import useTimestamp, {TIMESTAMP_KEYS} from '../shared/use-timestamp'; import {WorkflowTemplateCreator} from './workflow-template-creator'; import {WorkflowTemplateFilters} from './workflow-template-filters'; +import {WorkflowTemplateRow} from './workflow-template-row'; import './workflow-template-list.scss'; @@ -145,24 +145,9 @@ export function WorkflowTemplateList({match, location, history}: RouteComponentP CREATED - {templates.map(t => ( - -
- -
-
- {t.metadata.annotations?.[ANNOTATION_TITLE] ?? t.metadata.name} - {t.metadata.annotations?.[ANNOTATION_DESCRIPTION] ?

{t.metadata.annotations[ANNOTATION_DESCRIPTION]}

: null} -
-
{t.metadata.namespace}
-
- -
- - ))} + {templates.map(t => { + return ; + })} Workflow templates are reusable templates you can create new workflows from. . {learnMore}. diff --git a/ui/src/workflow-templates/workflow-template-row.scss b/ui/src/workflow-templates/workflow-template-row.scss new file mode 100644 index 000000000000..fca441264981 --- /dev/null +++ b/ui/src/workflow-templates/workflow-template-row.scss @@ -0,0 +1,8 @@ +@import 'node_modules/argo-ui/src/styles/config'; + +.wf-rows-name { + line-height: 1.5em; + display: inline-block; + vertical-align: middle; + white-space: pre-line; +} diff --git a/ui/src/workflow-templates/workflow-template-row.tsx b/ui/src/workflow-templates/workflow-template-row.tsx new file mode 100644 index 000000000000..1c3cab34dc4a --- /dev/null +++ b/ui/src/workflow-templates/workflow-template-row.tsx @@ -0,0 +1,41 @@ +import * as React from 'react'; +import {Link} from 'react-router-dom'; + +import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../shared/annotations'; +import {uiUrl} from '../shared/base'; +import {SuspenseReactMarkdownGfm} from '../shared/components/suspense-react-markdown-gfm'; +import {Timestamp} from '../shared/components/timestamp'; +import {WorkflowTemplate} from '../shared/models'; + +require('./workflow-template-row.scss'); + +interface WorkflowTemplateRowProps { + workflow: WorkflowTemplate; + displayISOFormat: boolean; +} + +export function WorkflowTemplateRow(props: WorkflowTemplateRowProps) { + const wf = props.workflow; + // title + description vars + const title = wf.metadata.annotations?.[ANNOTATION_TITLE] ?? wf.metadata.name; + const description = (wf.metadata.annotations?.[ANNOTATION_DESCRIPTION] && `\n${wf.metadata.annotations[ANNOTATION_DESCRIPTION]}`) || ''; + const hasAnnotation = title !== wf.metadata.name || description !== ''; + const markdown = `${title}${description}`; + + return ( +
+
+
+ +
+ +
{hasAnnotation ? : markdown}
+ +
{wf.metadata.namespace}
+
+ +
+
+
+ ); +} diff --git a/ui/src/workflows/components/workflow-creator.tsx b/ui/src/workflows/components/workflow-creator.tsx index a5e6c6e0e319..5be458fb82ce 100644 --- a/ui/src/workflows/components/workflow-creator.tsx +++ b/ui/src/workflows/components/workflow-creator.tsx @@ -10,6 +10,7 @@ import {exampleWorkflow} from '../../shared/examples'; import {Workflow, WorkflowTemplate} from '../../shared/models'; import * as nsUtils from '../../shared/namespaces'; import {services} from '../../shared/services'; +import {useEditableObject} from '../../shared/use-editable-object'; import {SubmitWorkflowPanel} from './submit-workflow-panel'; import {WorkflowEditor} from './workflow-editor'; @@ -19,7 +20,7 @@ export function WorkflowCreator({namespace, onCreate}: {namespace: string; onCre const [workflowTemplates, setWorkflowTemplates] = useState(); const [workflowTemplate, setWorkflowTemplate] = useState(); const [stage, setStage] = useState('choose-method'); - const [workflow, setWorkflow] = useState(); + const {object: workflow, setObject: setWorkflow, serialization, lang, setLang} = useEditableObject(); const [error, setError] = useState(); useEffect(() => { @@ -116,7 +117,7 @@ export function WorkflowCreator({namespace, onCreate}: {namespace: string; onCre - +
.
diff --git a/ui/src/workflows/components/workflow-details/workflow-resource-panel.tsx b/ui/src/workflows/components/workflow-details/workflow-resource-panel.tsx index 49193471bbe8..c7bddd44d6a1 100644 --- a/ui/src/workflows/components/workflow-details/workflow-resource-panel.tsx +++ b/ui/src/workflows/components/workflow-details/workflow-resource-panel.tsx @@ -1,10 +1,10 @@ import * as React from 'react'; -import {ObjectEditor} from '../../../shared/components/object-editor'; +import {SerializingObjectEditor} from '../../../shared/components/object-editor'; import {Workflow} from '../../../shared/models'; export const WorkflowResourcePanel = (props: {workflow: Workflow}) => (
- +
); diff --git a/ui/src/workflows/components/workflow-editor.tsx b/ui/src/workflows/components/workflow-editor.tsx index 45cab53bb3cd..9f0524335f35 100644 --- a/ui/src/workflows/components/workflow-editor.tsx +++ b/ui/src/workflows/components/workflow-editor.tsx @@ -4,6 +4,7 @@ import * as React from 'react'; import {MetadataEditor} from '../../shared/components/editors/metadata-editor'; import {WorkflowParametersEditor} from '../../shared/components/editors/workflow-parameters-editor'; import {ObjectEditor} from '../../shared/components/object-editor'; +import type {Lang} from '../../shared/components/object-parser'; import {Workflow} from '../../shared/models'; export function WorkflowEditor({ @@ -11,12 +12,18 @@ export function WorkflowEditor({ onTabSelected, onError, onChange, - template + onLangChange, + workflow, + serialization, + lang }: { - template: Workflow; - onChange: (template: Workflow) => void; + workflow: Workflow; + serialization: string; + lang: Lang; + onChange: (workflow: string | Workflow) => void; onError: (error: Error) => void; onTabSelected?: (tab: string) => void; + onLangChange: (lang: Lang) => void; selectedTabKey?: string; }) { return ( @@ -29,17 +36,26 @@ export function WorkflowEditor({ { key: 'manifest', title: 'Manifest', - content: onChange({...x})} /> + content: ( + + ) }, { key: 'parameters', title: 'Parameters', - content: onChange({...template, spec})} onError={onError} /> + content: onChange({...workflow, spec})} onError={onError} /> }, { key: 'metadata', title: 'MetaData', - content: onChange({...template, metadata})} /> + content: onChange({...workflow, metadata})} /> } ]} /> diff --git a/ui/src/workflows/components/workflow-yaml-viewer/workflow-yaml-viewer.tsx b/ui/src/workflows/components/workflow-yaml-viewer/workflow-yaml-viewer.tsx index dd4f0cdc5400..be52b8cb8223 100644 --- a/ui/src/workflows/components/workflow-yaml-viewer/workflow-yaml-viewer.tsx +++ 
b/ui/src/workflows/components/workflow-yaml-viewer/workflow-yaml-viewer.tsx @@ -1,7 +1,7 @@ import {SlideContents} from 'argo-ui/src/components/slide-contents/slide-contents'; import * as React from 'react'; -import {ObjectEditor} from '../../../shared/components/object-editor'; +import {SerializingObjectEditor} from '../../../shared/components/object-editor'; import * as models from '../../../shared/models'; import {getResolvedTemplates} from '../../../shared/template-resolution'; @@ -25,7 +25,7 @@ export function WorkflowYamlViewer(props: WorkflowYamlViewerProps) { contents.push(

{normalizeNodeName(props.selectedNode.displayName || props.selectedNode.name)}

- +
); } @@ -35,7 +35,7 @@ export function WorkflowYamlViewer(props: WorkflowYamlViewerProps) { contents.push(

{props.selectedNode.name}

- +
); } @@ -47,7 +47,7 @@ export function WorkflowYamlViewer(props: WorkflowYamlViewerProps) { } + contents={} className='workflow-yaml-section' /> ); @@ -59,7 +59,7 @@ export function WorkflowYamlViewer(props: WorkflowYamlViewerProps) { } + contents={} className='workflow-yaml-section' /> ); diff --git a/ui/src/workflows/components/workflows-row/react-markdown-gfm.tsx b/ui/src/workflows/components/workflows-row/react-markdown-gfm.tsx index 119f60b6e9f5..c1f73a49adb5 100644 --- a/ui/src/workflows/components/workflows-row/react-markdown-gfm.tsx +++ b/ui/src/workflows/components/workflows-row/react-markdown-gfm.tsx @@ -1,3 +1,4 @@ +// Use the lazy loaded suspense variant instead import React from 'react'; import ReactMarkdown from 'react-markdown'; import remarkGfm from 'remark-gfm'; diff --git a/ui/src/workflows/components/workflows-row/workflows-row.tsx b/ui/src/workflows/components/workflows-row/workflows-row.tsx index b22f412848ce..6a770674d5ea 100644 --- a/ui/src/workflows/components/workflows-row/workflows-row.tsx +++ b/ui/src/workflows/components/workflows-row/workflows-row.tsx @@ -6,8 +6,8 @@ import {Link} from 'react-router-dom'; import {ANNOTATION_DESCRIPTION, ANNOTATION_TITLE} from '../../../shared/annotations'; import {uiUrl} from '../../../shared/base'; import {DurationPanel} from '../../../shared/components/duration-panel'; -import {Loading} from '../../../shared/components/loading'; import {PhaseIcon} from '../../../shared/components/phase-icon'; +import {SuspenseReactMarkdownGfm} from '../../../shared/components/suspense-react-markdown-gfm'; import {Timestamp} from '../../../shared/components/timestamp'; import {wfDuration} from '../../../shared/duration'; import * as models from '../../../shared/models'; @@ -32,7 +32,7 @@ export function WorkflowsRow(props: WorkflowsRowProps) { // title + description vars const title = wf.metadata.annotations?.[ANNOTATION_TITLE] ?? 
wf.metadata.name; const description = (wf.metadata.annotations?.[ANNOTATION_DESCRIPTION] && `\n${wf.metadata.annotations[ANNOTATION_DESCRIPTION]}`) || ''; - const hasAnnotation = title !== wf.metadata.name && description !== ''; + const hasAnnotation = title !== wf.metadata.name || description !== ''; const markdown = `${title}${description}`; return ( @@ -110,16 +110,3 @@ export function WorkflowsRow(props: WorkflowsRowProps) { ); } - -// lazy load ReactMarkdown (and remark-gfm) as it is a large optional component (which can be split into a separate bundle) -const LazyReactMarkdownGfm = React.lazy(() => { - return import(/* webpackChunkName: "react-markdown-plus-gfm" */ './react-markdown-gfm'); -}); - -function SuspenseReactMarkdownGfm(props: {markdown: string}) { - return ( - }> - - - ); -} diff --git a/util/errors/errors.go b/util/errors/errors.go index 19addc48312c..6d41bf612213 100644 --- a/util/errors/errors.go +++ b/util/errors/errors.go @@ -26,7 +26,7 @@ func IgnoreContainerNotFoundErr(err error) error { // IsTransientErr reports whether the error is transient and logs it. 
func IsTransientErr(err error) bool { isTransient := IsTransientErrQuiet(err) - if !isTransient { + if err != nil && !isTransient { log.Warnf("Non-transient error: %v", err) } return isTransient diff --git a/util/errors/errors_test.go b/util/errors/errors_test.go index 8090072c1ae1..702c3d594251 100644 --- a/util/errors/errors_test.go +++ b/util/errors/errors_test.go @@ -8,6 +8,8 @@ import ( "os/exec" "testing" + log "github.com/sirupsen/logrus" + logtest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" apierr "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/runtime/schema" @@ -50,12 +52,19 @@ var ( const transientEnvVarKey = "TRANSIENT_ERROR_PATTERN" func TestIsTransientErr(t *testing.T) { + hook := &logtest.Hook{} + log.AddHook(hook) + defer log.StandardLogger().ReplaceHooks(nil) + t.Run("Nil", func(t *testing.T) { assert.False(t, IsTransientErr(nil)) + assert.Nil(t, hook.LastEntry()) }) t.Run("ResourceQuotaConflictErr", func(t *testing.T) { assert.False(t, IsTransientErr(apierr.NewConflict(schema.GroupResource{}, "", nil))) + assert.Contains(t, hook.LastEntry().Message, "Non-transient error:") assert.True(t, IsTransientErr(apierr.NewConflict(schema.GroupResource{Group: "v1", Resource: "resourcequotas"}, "", nil))) + assert.Contains(t, hook.LastEntry().Message, "Transient error:") }) t.Run("ResourceQuotaTimeoutErr", func(t *testing.T) { assert.False(t, IsTransientErr(apierr.NewInternalError(errors.New("")))) diff --git a/workflow/common/util.go b/workflow/common/util.go index 4b5aa0278225..b7ffbe291494 100644 --- a/workflow/common/util.go +++ b/workflow/common/util.go @@ -133,14 +133,19 @@ func overwriteWithArguments(argParam, inParam *wfv1.Parameter) { func substituteAndGetConfigMapValue(inParam *wfv1.Parameter, globalParams Parameters, namespace string, configMapStore ConfigMapStore) error { if inParam.ValueFrom != nil && inParam.ValueFrom.ConfigMapKeyRef != nil { if configMapStore != nil { + replaceMap := 
make(map[string]interface{}) + for k, v := range globalParams { + replaceMap[k] = v + } + // SubstituteParams is called only at the end of this method. To support parametrization of the configmap // we need to perform a substitution here over the name and the key of the ConfigMapKeyRef. - cmName, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Name, globalParams) + cmName, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Name, replaceMap) if err != nil { log.WithError(err).Error("unable to substitute name for ConfigMapKeyRef") return err } - cmKey, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Key, globalParams) + cmKey, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Key, replaceMap) if err != nil { log.WithError(err).Error("unable to substitute key for ConfigMapKeyRef") return err @@ -219,21 +224,17 @@ func ProcessArgs(tmpl *wfv1.Template, args wfv1.ArgumentsProvider, globalParams, return SubstituteParams(newTmpl, globalParams, localParams) } -// substituteConfigMapKeyRefParam check if ConfigMapKeyRef's key is a param and perform the substitution. 
-func substituteConfigMapKeyRefParam(in string, globalParams Parameters) (string, error) { - if strings.HasPrefix(in, "{{") && strings.HasSuffix(in, "}}") { - k := strings.TrimSuffix(strings.TrimPrefix(in, "{{"), "}}") - k = strings.Trim(k, " ") - - v, ok := globalParams[k] - if !ok { - err := errors.InternalError(fmt.Sprintf("parameter %s not found", k)) - log.WithError(err).Error() - return "", err - } - return v, nil +// substituteConfigMapKeyRefParam performs template substitution for ConfigMapKeyRef +func substituteConfigMapKeyRefParam(in string, replaceMap map[string]interface{}) (string, error) { + tmpl, err := template.NewTemplate(in) + if err != nil { + return "", err + } + replacedString, err := tmpl.Replace(replaceMap, false) + if err != nil { + return "", fmt.Errorf("failed to substitute configMapKeyRef: %w", err) } - return in, nil + return replacedString, nil } // SubstituteParams returns a new copy of the template with global, pod, and input parameters substituted diff --git a/workflow/common/util_test.go b/workflow/common/util_test.go index a5b02c882d8d..529da00690ce 100644 --- a/workflow/common/util_test.go +++ b/workflow/common/util_test.go @@ -48,54 +48,6 @@ spec: command: [cowsay] args: ["hello world"] -` - validConfigMapRefWf = `apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - name: test-template-configmapkeyselector-substitution -spec: - entrypoint: whalesay - arguments: - parameters: - - name: name - value: simple-parameters - - name: key - value: msg - templates: - - name: whalesay - inputs: - parameters: - - name: message - valueFrom: - configMapKeyRef: - name: "{{ workflow.parameters.name }}" - key: "{{ workflow.parameters.key }}" - container: - image: argoproj/argosay:v2 - args: - - echo - - "{{inputs.parameters.message}}" -` - invalidConfigMapRefWf = `apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - name: test-template-configmapkeyselector-substitution -spec: - entrypoint: whalesay - templates: - - name: 
whalesay - inputs: - parameters: - - name: message - valueFrom: - configMapKeyRef: - name: "{{ workflow.parameters.name }}" - key: "{{ workflow.parameters.key }}" - container: - image: argoproj/argosay:v2 - args: - - echo - - "{{inputs.parameters.message}}" ` ) @@ -194,45 +146,70 @@ func TestIsDone(t *testing.T) { } func TestSubstituteConfigMapKeyRefParam(t *testing.T) { - res := ParseObjects([]byte(validConfigMapRefWf), false) - assert.Len(t, res, 1) - - obj, ok := res[0].Object.(*wfv1.Workflow) - assert.True(t, ok) - assert.NotNil(t, obj) - - globalParams := Parameters{ + globalParams := map[string]interface{}{ "workflow.parameters.name": "simple-parameters", "workflow.parameters.key": "msg", } - - for _, inParam := range obj.GetTemplateByName("whalesay").Inputs.Parameters { - cmName, _ := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Name, globalParams) - assert.Equal(t, "simple-parameters", cmName, "it should be equal") - - cmKey, _ := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Key, globalParams) - assert.Equal(t, "msg", cmKey, "it should be equal") + tests := []struct { + name string + configMapKeyRefParam string + expected string + expectedErr string + }{ + { + name: "No string templating", + configMapKeyRefParam: "simple-parameters", + expected: "simple-parameters", + expectedErr: "", + }, + { + name: "Simple template", + configMapKeyRefParam: "{{ workflow.parameters.name }}", + expected: "simple-parameters", + expectedErr: "", + }, + { + name: "Simple template with prefix and suffix", + configMapKeyRefParam: "prefix-{{ workflow.parameters.name }}-suffix", + expected: "prefix-simple-parameters-suffix", + expectedErr: "", + }, + { + name: "Expression template", + configMapKeyRefParam: "{{=upper(workflow.parameters.key)}}", + expected: "MSG", + expectedErr: "", + }, + { + name: "Simple template referencing nonexistent param", + configMapKeyRefParam: "prefix-{{ workflow.parameters.bad }}", + expected: "", + expectedErr: 
"failed to substitute configMapKeyRef: failed to resolve {{ workflow.parameters.bad }}", + }, + { + name: "Expression template with invalid expression", + configMapKeyRefParam: "{{=!}}", + expected: "", + expectedErr: "failed to substitute configMapKeyRef: failed to evaluate expression: unexpected token EOF (1:1)\n | !\n | ^", + }, + { + name: "Malformed template", + configMapKeyRefParam: "{{ workflow.parameters.bad", + expected: "", + expectedErr: "Cannot find end tag=\"}}\" in the template=\"{{ workflow.parameters.bad\" starting from \" workflow.parameters.bad\"", + }, } -} - -func TestSubstituteConfigMapKeyRefParamWithNoParamsDefined(t *testing.T) { - res := ParseObjects([]byte(invalidConfigMapRefWf), false) - assert.Len(t, res, 1) - - obj, ok := res[0].Object.(*wfv1.Workflow) - assert.True(t, ok) - assert.NotNil(t, obj) - - globalParams := Parameters{} - - for _, inParam := range obj.GetTemplateByName("whalesay").Inputs.Parameters { - cmName, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Name, globalParams) - require.EqualError(t, err, "parameter workflow.parameters.name not found") - assert.Equal(t, "", cmName) - cmKey, err := substituteConfigMapKeyRefParam(inParam.ValueFrom.ConfigMapKeyRef.Key, globalParams) - require.EqualError(t, err, "parameter workflow.parameters.key not found") - assert.Equal(t, "", cmKey) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := substituteConfigMapKeyRefParam(tt.configMapKeyRefParam, globalParams) + assert.Equal(t, tt.expected, result) + if tt.expectedErr != "" { + require.EqualError(t, err, tt.expectedErr) + } else { + require.NoError(t, err) + } + }) } } diff --git a/workflow/controller/operator_test.go b/workflow/controller/operator_test.go index 5b772e2f33f6..fe9b9a9aac13 100644 --- a/workflow/controller/operator_test.go +++ b/workflow/controller/operator_test.go @@ -6514,6 +6514,7 @@ status: phase: Failed nodes: my-wf: + id: my-wf name: my-wf phase: Failed `) diff 
--git a/workflow/util/util.go b/workflow/util/util.go index b56e9429a35d..395928000c7a 100644 --- a/workflow/util/util.go +++ b/workflow/util/util.go @@ -2,6 +2,7 @@ package util import ( "bufio" + "container/list" "context" "encoding/json" "fmt" @@ -12,6 +13,7 @@ import ( "path/filepath" "regexp" nruntime "runtime" + "slices" "strconv" "strings" "time" @@ -769,19 +771,6 @@ func convertNodeID(newWf *wfv1.Workflow, regex *regexp.Regexp, oldNodeID string, return newWf.NodeID(newNodeName) } -func getDescendantNodeIDs(wf *wfv1.Workflow, node wfv1.NodeStatus) []string { - var descendantNodeIDs []string - descendantNodeIDs = append(descendantNodeIDs, node.Children...) - for _, child := range node.Children { - childStatus, err := wf.Status.Nodes.Get(child) - if err != nil { - log.Panicf("Coudn't obtain child for %s, panicking", child) - } - descendantNodeIDs = append(descendantNodeIDs, getDescendantNodeIDs(wf, *childStatus)...) - } - return descendantNodeIDs -} - func isDescendantNodeSucceeded(wf *wfv1.Workflow, node wfv1.NodeStatus, nodeIDsToReset map[string]bool) bool { for _, child := range node.Children { childStatus, err := wf.Status.Nodes.Get(child) @@ -807,50 +796,420 @@ func deletePodNodeDuringRetryWorkflow(wf *wfv1.Workflow, node wfv1.NodeStatus, d return deletedPods, podsToDelete } -func containsNode(nodes []string, node string) bool { - for _, e := range nodes { - if e == node { - return true +func createNewRetryWorkflow(wf *wfv1.Workflow, parameters []string) (*wfv1.Workflow, error) { + newWF := wf.DeepCopy() + + // Delete/reset fields which indicate workflow completed + delete(newWF.Labels, common.LabelKeyCompleted) + delete(newWF.Labels, common.LabelKeyWorkflowArchivingStatus) + newWF.Status.Conditions.UpsertCondition(wfv1.Condition{Status: metav1.ConditionFalse, Type: wfv1.ConditionTypeCompleted}) + newWF.ObjectMeta.Labels[common.LabelKeyPhase] = string(wfv1.NodeRunning) + newWF.Status.Phase = wfv1.WorkflowRunning + newWF.Status.Nodes = make(wfv1.Nodes) + 
newWF.Status.Message = "" + newWF.Status.StartedAt = metav1.Time{Time: time.Now().UTC()} + newWF.Status.FinishedAt = metav1.Time{} + if newWF.Status.StoredWorkflowSpec != nil { + newWF.Status.StoredWorkflowSpec.Shutdown = "" + } + newWF.Spec.Shutdown = "" + newWF.Status.PersistentVolumeClaims = []apiv1.Volume{} + if newWF.Spec.ActiveDeadlineSeconds != nil && *newWF.Spec.ActiveDeadlineSeconds == 0 { + // if it was terminated, unset the deadline + newWF.Spec.ActiveDeadlineSeconds = nil + } + // Override parameters + if parameters != nil { + if _, ok := wf.ObjectMeta.Labels[common.LabelKeyPreviousWorkflowName]; ok { + log.Warnln("Overriding parameters on resubmitted workflows may have unexpected results") + } + err := overrideParameters(newWF, parameters) + if err != nil { + return nil, err } } - return false + return newWF, nil +} + +type dagNode struct { + n *wfv1.NodeStatus + parent *dagNode + children []*dagNode +} + +func newWorkflowsDag(wf *wfv1.Workflow) ([]*dagNode, error) { + nodes := make(map[string]*dagNode) + parentsMap := make(map[string]*wfv1.NodeStatus) + + // create mapping from node to parent + // as well as creating temp mappings from nodeID to node + for _, wfNode := range wf.Status.Nodes { + n := dagNode{} + n.n = &wfNode + nodes[wfNode.ID] = &n + for _, child := range wfNode.Children { + parentsMap[child] = &wfNode + } + } + + for _, wfNode := range wf.Status.Nodes { + parentWfNode, ok := parentsMap[wfNode.ID] + if !ok && wfNode.Name != wf.Name && !strings.HasPrefix(wfNode.Name, wf.ObjectMeta.Name+".onExit") { + return nil, fmt.Errorf("couldn't find parent node for %s", wfNode.ID) + } + + var parentNode *dagNode + if parentWfNode != nil { + parentNode = nodes[parentWfNode.ID] + } + + children := []*dagNode{} + + for _, childID := range wfNode.Children { + childNode, ok := nodes[childID] + if !ok { + return nil, fmt.Errorf("coudln't obtain child %s", childID) + } + children = append(children, childNode) + } + nodes[wfNode.ID].parent = parentNode + 
nodes[wfNode.ID].children = children + } + + values := []*dagNode{} + for _, v := range nodes { + values = append(values, v) + } + return values, nil +} + +func singularPath(nodes []*dagNode, toNode string) ([]*dagNode, error) { + if len(nodes) <= 0 { + return nil, fmt.Errorf("expected at least 1 node") + } + var root *dagNode + var leaf *dagNode + for i := range nodes { + if nodes[i].n.ID == toNode { + leaf = nodes[i] + } + if nodes[i].parent == nil { + root = nodes[i] + } + } + + if leaf == nil { + return nil, fmt.Errorf("was unable to find %s", toNode) + } + + curr := leaf + + reverseNodes := []*dagNode{} + for { + reverseNodes = append(reverseNodes, curr) + if curr.n.ID == root.n.ID { + break + } + if curr.parent == nil { + return nil, fmt.Errorf("parent was nil but curr is not the root node") + } + curr = curr.parent + } + + slices.Reverse(reverseNodes) + return reverseNodes, nil +} + +func getChildren(n *dagNode) map[string]bool { + children := make(map[string]bool) + queue := list.New() + queue.PushBack(n) + for { + currNode := queue.Front() + if currNode == nil { + break + } + + curr := currNode.Value.(*dagNode) + for i := range curr.children { + children[curr.children[i].n.ID] = true + queue.PushBack(curr.children[i]) + } + queue.Remove(currNode) + } + return children +} + +type resetFn func(string) +type deleteFn func(string) + +// untilFn is a function that returns two variables, the first indicates +// a `found` boolean while the second indicates if reset should be called. 
+type untilFn func(*dagNode) (bool, bool) + +func getUntilFnNodeType(nodeType wfv1.NodeType) untilFn { + return func(n *dagNode) (bool, bool) { + return n.n.Type == nodeType, true + } +} + +func resetUntil(n *dagNode, should untilFn, resetFunc resetFn) (*dagNode, error) { + curr := n + for { + if curr == nil { + return nil, fmt.Errorf("was seeking node but ran out of nodes to explore") + } + + if foundNode, shouldReset := should(curr); foundNode { + if shouldReset { + resetFunc(curr.n.ID) + } + return curr, nil + } + curr = curr.parent + } } -func isGroupNode(node wfv1.NodeStatus) bool { - return node.Type == wfv1.NodeTypeDAG || node.Type == wfv1.NodeTypeTaskGroup || node.Type == wfv1.NodeTypeStepGroup || node.Type == wfv1.NodeTypeSteps +func getTillBoundaryFn(boundaryID string) untilFn { + return func(n *dagNode) (bool, bool) { + return n.n.ID == boundaryID, n.n.BoundaryID != "" + } } -func resetConnectedParentGroupNodes(oldWF *wfv1.Workflow, newWF *wfv1.Workflow, currentNode wfv1.NodeStatus, resetParentGroupNodes []string) (*wfv1.Workflow, []string) { - currentNodeID := currentNode.ID +func resetBoundaries(n *dagNode, resetFunc resetFn) (*dagNode, error) { + curr := n for { - currentNode, err := oldWF.Status.Nodes.Get(currentNodeID) + if curr == nil { + return curr, nil + } + if curr.parent != nil && curr.parent.n.Type == wfv1.NodeTypeStepGroup { + resetFunc(curr.parent.n.ID) + } + seekingBoundaryID := curr.n.BoundaryID + if seekingBoundaryID == "" { + return curr.parent, nil + } + var err error + curr, err = resetUntil(curr, getTillBoundaryFn(seekingBoundaryID), resetFunc) if err != nil { - log.Panicf("dying due to inability to obtain node for %s, panicking", currentNodeID) + return nil, err } - if !containsNode(resetParentGroupNodes, currentNodeID) { - newWF.Status.Nodes.Set(currentNodeID, resetNode(*currentNode.DeepCopy())) - resetParentGroupNodes = append(resetParentGroupNodes, currentNodeID) - log.Debugf("Reset connected group node %s", currentNode.Name) + } 
+} + +func resetStepGroup(n *dagNode, resetFunc resetFn) (*dagNode, error) { + return resetUntil(n, getUntilFnNodeType(wfv1.NodeTypeStepGroup), resetFunc) +} + +func resetSteps(n *dagNode, resetFunc resetFn) (*dagNode, error) { + n, err := resetUntil(n, getUntilFnNodeType(wfv1.NodeTypeSteps), resetFunc) + if err != nil { + return nil, err + } + return resetBoundaries(n, resetFunc) +} + +func resetTaskGroup(n *dagNode, resetFunc resetFn) (*dagNode, error) { + return resetUntil(n, getUntilFnNodeType(wfv1.NodeTypeTaskGroup), resetFunc) +} + +func resetDAG(n *dagNode, resetFunc resetFn) (*dagNode, error) { + n, err := resetUntil(n, getUntilFnNodeType(wfv1.NodeTypeDAG), resetFunc) + if err != nil { + return nil, err + } + return resetBoundaries(n, resetFunc) +} + +// resetPod is only called in the event a Container was found. This implies that there is a parent pod. +func resetPod(n *dagNode, resetFunc resetFn, addToDelete deleteFn) (*dagNode, error) { + // this sets to reset but resets are overridden by deletes in the final FormulateRetryWorkflow logic. + curr, err := resetUntil(n, getUntilFnNodeType(wfv1.NodeTypePod), resetFunc) + if err != nil { + return nil, err + } + addToDelete(curr.n.ID) + children := getChildren(curr) + for childID := range children { + addToDelete(childID) + } + return curr, nil +} + +func resetPath(allNodes []*dagNode, startNode string) (map[string]bool, map[string]bool, error) { + nodes, err := singularPath(allNodes, startNode) + + curr := nodes[len(nodes)-1] + if len(nodes) > 0 { + // remove startNode + nodes = nodes[:len(nodes)-1] + } + + nodesToDelete := getChildren(curr) + nodesToDelete[curr.n.ID] = true + + nodesToReset := make(map[string]bool) + + if err != nil { + return nil, nil, err + } + l := len(nodes) + if l <= 0 { + return nodesToReset, nodesToDelete, nil + } + + // safe to reset the startNode since deletions + // override resets. 
+ addToReset := func(nodeID string) { + nodesToReset[nodeID] = true + } + + addToDelete := func(nodeID string) { + nodesToDelete[nodeID] = true + } + + var mustFind wfv1.NodeType + mustFind = "" + + if curr.n.Type == wfv1.NodeTypeContainer { + // special case where the retry node is the container of a containerSet + mustFind = wfv1.NodeTypePod + } + + findBoundaries := false + for { + + if curr == nil { + break } - if currentNode.BoundaryID != "" && currentNode.BoundaryID != oldWF.ObjectMeta.Name { - parentNode, err := oldWF.Status.Nodes.Get(currentNode.BoundaryID) + + switch curr.n.Type { + case wfv1.NodeTypePod: + //ignore + case wfv1.NodeTypeContainer: + //ignore + case wfv1.NodeTypeSteps: + addToReset(curr.n.ID) + findBoundaries = true + case wfv1.NodeTypeStepGroup: + addToReset(curr.n.ID) + findBoundaries = true + case wfv1.NodeTypeDAG: + addToReset(curr.n.ID) + findBoundaries = true + case wfv1.NodeTypeTaskGroup: + addToReset(curr.n.ID) + findBoundaries = true + case wfv1.NodeTypeRetry: + addToReset(curr.n.ID) + case wfv1.NodeTypeSkipped: + // ignore -> doesn't make sense to reach this + case wfv1.NodeTypeSuspend: + // ignore + case wfv1.NodeTypeHTTP: + // ignore + case wfv1.NodeTypePlugin: + addToReset(curr.n.ID) + } + + if mustFind == "" && !findBoundaries { + curr = curr.parent + continue + } + + if findBoundaries { + curr, err = resetBoundaries(curr, addToReset) if err != nil { - log.Panicf("unable to obtain node for %s, panicking", currentNode.BoundaryID) - } - if isGroupNode(*parentNode) { - currentNodeID = parentNode.ID - } else { - break + return nil, nil, err } - } else { + findBoundaries = false + continue + } + + switch mustFind { + case wfv1.NodeTypePod: + curr, err = resetPod(curr, addToReset, addToDelete) + case wfv1.NodeTypeSteps: + curr, err = resetSteps(curr, addToReset) + case wfv1.NodeTypeStepGroup: + curr, err = resetStepGroup(curr, addToReset) + case wfv1.NodeTypeDAG: + curr, err = resetDAG(curr, addToReset) + case wfv1.NodeTypeTaskGroup: 
+ curr, err = resetTaskGroup(curr, addToReset) + default: + return nil, nil, fmt.Errorf("invalid mustFind of %s supplied", mustFind) + } + mustFind = "" + if err != nil { + return nil, nil, err + } + + } + return nodesToReset, nodesToDelete, nil +} + +func setUnion[T comparable](m1 map[T]bool, m2 map[T]bool) map[T]bool { + res := make(map[T]bool) + + for k, v := range m1 { + res[k] = v + } + + for k, v := range m2 { + if _, ok := m1[k]; !ok { + res[k] = v + } + } + return res +} +func shouldRetryFailedType(nodeTyp wfv1.NodeType) bool { + if nodeTyp == wfv1.NodeTypePod || nodeTyp == wfv1.NodeTypeContainer { + return true + } + return false +} + +// dagSortedNodes sorts the nodes based on topological order, omits onExitNode +func dagSortedNodes(nodes []*dagNode, rootNodeName string) []*dagNode { + sortedNodes := make([]*dagNode, 0) + + if len(nodes) == 0 { + return sortedNodes + } + + queue := make([]*dagNode, 0) + + for _, n := range nodes { + if n.n.Name == rootNodeName { + queue = append(queue, n) break } } - return newWF, resetParentGroupNodes + + if len(queue) != 1 { + panic("couldn't find root node") + } + + for len(queue) > 0 { + curr := queue[0] + sortedNodes = append(sortedNodes, curr) + queue = queue[1:] + queue = append(queue, curr.children...) 
+ } + + return sortedNodes } -// FormulateRetryWorkflow formulates a previous workflow to be retried, deleting all failed steps as well as the onExit node (and children) +// FormulateRetryWorkflow attempts to retry a workflow +// The logic is as follows: +// create a DAG +// topological sort +// iterate through all must delete nodes: iterator $node +// obtain singular path to each $node +// reset all "reset points" to $node func FormulateRetryWorkflow(ctx context.Context, wf *wfv1.Workflow, restartSuccessful bool, nodeFieldSelector string, parameters []string) (*wfv1.Workflow, []string, error) { + switch wf.Status.Phase { case wfv1.WorkflowFailed, wfv1.WorkflowError: case wfv1.WorkflowSucceeded: @@ -861,178 +1220,152 @@ func FormulateRetryWorkflow(ctx context.Context, wf *wfv1.Workflow, restartSucce return nil, nil, errors.Errorf(errors.CodeBadRequest, "Cannot retry a workflow in phase %s", wf.Status.Phase) } - newWF := wf.DeepCopy() + onExitNodeName := wf.ObjectMeta.Name + ".onExit" - // Delete/reset fields which indicate workflow completed - delete(newWF.Labels, common.LabelKeyCompleted) - delete(newWF.Labels, common.LabelKeyWorkflowArchivingStatus) - newWF.Status.Conditions.UpsertCondition(wfv1.Condition{Status: metav1.ConditionFalse, Type: wfv1.ConditionTypeCompleted}) - newWF.ObjectMeta.Labels[common.LabelKeyPhase] = string(wfv1.NodeRunning) - newWF.Status.Phase = wfv1.WorkflowRunning - newWF.Status.Nodes = make(wfv1.Nodes) - newWF.Status.Message = "" - newWF.Status.StartedAt = metav1.Time{Time: time.Now().UTC()} - newWF.Status.FinishedAt = metav1.Time{} - if newWF.Status.StoredWorkflowSpec != nil { - newWF.Status.StoredWorkflowSpec.Shutdown = "" + newWf, err := createNewRetryWorkflow(wf, parameters) + if err != nil { + return nil, nil, err } - newWF.Spec.Shutdown = "" - newWF.Status.PersistentVolumeClaims = []apiv1.Volume{} - if newWF.Spec.ActiveDeadlineSeconds != nil && *newWF.Spec.ActiveDeadlineSeconds == 0 { - // if it was terminated, unset the deadline - 
newWF.Spec.ActiveDeadlineSeconds = nil + + deleteNodesMap, err := getNodeIDsToReset(restartSuccessful, nodeFieldSelector, wf.Status.Nodes) + if err != nil { + return nil, nil, err } - // Override parameters - if parameters != nil { - if _, ok := wf.ObjectMeta.Labels[common.LabelKeyPreviousWorkflowName]; ok { - log.Warnln("Overriding parameters on resubmitted workflows may have unexpected results") + + failed := make(map[string]bool) + for nodeID, node := range wf.Status.Nodes { + if node.Phase.FailedOrError() && shouldRetryFailedType(node.Type) && !isDescendantNodeSucceeded(wf, node, deleteNodesMap) { + failed[nodeID] = true } - err := overrideParameters(newWF, parameters) + } + for failedNode := range failed { + deleteNodesMap[failedNode] = true + } + + nodes, err := newWorkflowsDag(wf) + if err != nil { + return nil, nil, err + } + + toReset := make(map[string]bool) + toDelete := make(map[string]bool) + + nodesMap := make(map[string]*dagNode) + for i := range nodes { + nodesMap[nodes[i].n.ID] = nodes[i] + } + + nodes = dagSortedNodes(nodes, wf.Name) + + deleteNodes := make([]*dagNode, 0) + + // deleteNodes will not contain an exit node + for i := range nodes { + if _, ok := deleteNodesMap[nodes[i].n.ID]; ok { + deleteNodes = append(deleteNodes, nodes[i]) + } + } + + // this is kind of complex + // we rely on deleteNodes being topologically sorted. + // this is done via a breadth first search on the dag via `dagSortedNodes` + // This is because nodes at the top take precedence over nodes at the bottom. + // if a failed node was declared to be scheduled for deletion, we should + // never execute resetPath on that node. + // we ensure this behaviour via calling resetPath in topological order. 
+ for i := range deleteNodes { + currNode := deleteNodes[i] + shouldDelete := toDelete[currNode.n.ID] + if shouldDelete { + continue + } + pathToReset, pathToDelete, err := resetPath(nodes, currNode.n.ID) if err != nil { return nil, nil, err } + toReset = setUnion(toReset, pathToReset) + toDelete = setUnion(toDelete, pathToDelete) } - onExitNodeName := wf.ObjectMeta.Name + ".onExit" - // Get all children of nodes that match filter - nodeIDsToReset, err := getNodeIDsToReset(restartSuccessful, nodeFieldSelector, wf.Status.Nodes) - if err != nil { - return nil, nil, err + for nodeID := range toReset { + // avoid reseting nodes that are marked for deletion + if in := toDelete[nodeID]; in { + continue + } + + n := wf.Status.Nodes[nodeID] + + newWf.Status.Nodes.Set(nodeID, resetNode(*n.DeepCopy())) } - // Iterate the previous nodes. If it was successful Pod carry it forward - deletedNodes := make(map[string]bool) deletedPods := make(map[string]bool) - var podsToDelete []string - var resetParentGroupNodes []string - for _, node := range wf.Status.Nodes { - doForceResetNode := false - if _, present := nodeIDsToReset[node.ID]; present { - // if we are resetting this node then don't carry it across regardless of its phase - doForceResetNode = true - } - switch node.Phase { - case wfv1.NodeSucceeded, wfv1.NodeSkipped: - if strings.HasPrefix(node.Name, onExitNodeName) || doForceResetNode { - log.Debugf("Force reset for node: %s", node.Name) - // Reset parent node if this node is a step/task group or DAG. 
- if isGroupNode(node) && node.BoundaryID != "" { - if node.ID != wf.ObjectMeta.Name { // Skip root node - descendantNodeIDs := getDescendantNodeIDs(wf, node) - var nodeGroupNeedsReset bool - // Only reset DAG that's in the same branch as the nodeIDsToReset - for _, child := range descendantNodeIDs { - childNode, err := wf.Status.Nodes.Get(child) - if err != nil { - log.Warnf("was unable to obtain node for %s due to %s", child, err) - return nil, nil, fmt.Errorf("Was unable to obtain node for %s due to %s", child, err) - } - if _, present := nodeIDsToReset[child]; present { - log.Debugf("Group node %s needs to reset since its child %s is in the force reset path", node.Name, childNode.Name) - nodeGroupNeedsReset = true - break - } - } - if nodeGroupNeedsReset { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - } - } - } else { - if node.Type == wfv1.NodeTypePod || node.Type == wfv1.NodeTypeSuspend || node.Type == wfv1.NodeTypeSkipped { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - // Only remove the descendants of a suspended node but not the suspended node itself. The descendants - // of a suspended node need to be removed since the conditions should be re-evaluated based on - // the modified supplied parameter values. 
- if node.Type != wfv1.NodeTypeSuspend { - deletedNodes[node.ID] = true - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, node, deletedPods, podsToDelete) - log.Debugf("Deleted pod node: %s", node.Name) - } + podsToDelete := []string{} - descendantNodeIDs := getDescendantNodeIDs(wf, node) - for _, descendantNodeID := range descendantNodeIDs { - deletedNodes[descendantNodeID] = true - descendantNode, err := wf.Status.Nodes.Get(descendantNodeID) - if err != nil { - log.Warnf("Was unable to obtain node for %s due to %s", descendantNodeID, err) - return nil, nil, fmt.Errorf("Was unable to obtain node for %s due to %s", descendantNodeID, err) - } - if descendantNode.Type == wfv1.NodeTypePod { - newWF, resetParentGroupNodes = resetConnectedParentGroupNodes(wf, newWF, node, resetParentGroupNodes) - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, *descendantNode, deletedPods, podsToDelete) - log.Debugf("Deleted pod node %s since it belongs to node %s", descendantNode.Name, node.Name) - } - } - } else { - log.Debugf("Reset non-pod/suspend/skipped node %s", node.Name) - newNode := node.DeepCopy() - newWF.Status.Nodes.Set(newNode.ID, resetNode(*newNode)) - } - } - } else { - if !containsNode(resetParentGroupNodes, node.ID) { - log.Debugf("Node %s remains as is", node.Name) - newWF.Status.Nodes.Set(node.ID, node) + for nodeID := range toDelete { + n := wf.Status.Nodes[nodeID] + if n.Type == wfv1.NodeTypePod { + deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, n, deletedPods, podsToDelete) + } + } + + for id, n := range wf.Status.Nodes { + shouldDelete := toDelete[id] || strings.HasPrefix(n.Name, onExitNodeName) + if _, err := newWf.Status.Nodes.Get(id); err != nil && !shouldDelete { + newWf.Status.Nodes.Set(id, *n.DeepCopy()) + } + if n.Name == onExitNodeName { + queue := list.New() + queue.PushBack(n) + for { + currNode := queue.Front() + if currNode == nil { + break } - } - case wfv1.NodeError, wfv1.NodeFailed, 
wfv1.NodeOmitted: - if isGroupNode(node) { - newNode := node.DeepCopy() - newWF.Status.Nodes.Set(newNode.ID, resetNode(*newNode)) - log.Debugf("Reset %s node %s since it's a group node", node.Name, string(node.Phase)) - continue - } else { - if node.Type != wfv1.NodeTypeRetry && isDescendantNodeSucceeded(wf, node, nodeIDsToReset) { - log.Debugf("Node %s remains as is since it has succeed child nodes.", node.Name) - newWF.Status.Nodes.Set(node.ID, node) - continue + curr := currNode.Value.(wfv1.NodeStatus) + deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, curr, deletedPods, podsToDelete) + for i := range curr.Children { + child, err := wf.Status.Nodes.Get(curr.Children[i]) + if err != nil { + return nil, nil, err + } + queue.PushBack(child) } - log.Debugf("Deleted %s node %s since it's not a group node", node.Name, string(node.Phase)) - deletedPods, podsToDelete = deletePodNodeDuringRetryWorkflow(wf, node, deletedPods, podsToDelete) - log.Debugf("Deleted pod node: %s", node.Name) - deletedNodes[node.ID] = true + queue.Remove(currNode) } - // do not add this status to the node. pretend as if this node never existed. 
- default: - // Do not allow retry of workflows with pods in Running/Pending phase - return nil, nil, errors.InternalErrorf("Workflow cannot be retried with node %s in %s phase", node.Name, node.Phase) + } + if n.Name == wf.Name && !shouldRetryFailedType(n.Type) { + newWf.Status.Nodes.Set(id, resetNode(*n.DeepCopy())) } } + for id, oldWfNode := range wf.Status.Nodes { - if len(deletedNodes) > 0 { - for _, node := range newWF.Status.Nodes { - if deletedNodes[node.ID] { - log.Debugf("Removed node: %s", node.Name) - newWF.Status.Nodes.Delete(node.ID) - continue - } + if !newWf.Status.Nodes.Has(id) { + continue + } - var newChildren []string - for _, child := range node.Children { - if !deletedNodes[child] { - newChildren = append(newChildren, child) - } + newChildren := []string{} + for _, childID := range oldWfNode.Children { + if toDelete[childID] { + continue } - node.Children = newChildren + newChildren = append(newChildren, childID) + } + newOutboundNodes := []string{} - var outboundNodes []string - for _, outboundNode := range node.OutboundNodes { - if !deletedNodes[outboundNode] { - outboundNodes = append(outboundNodes, outboundNode) - } + for _, outBoundNodeID := range oldWfNode.OutboundNodes { + if toDelete[outBoundNodeID] { + continue } - node.OutboundNodes = outboundNodes - - newWF.Status.Nodes.Set(node.ID, node) + newOutboundNodes = append(newOutboundNodes, outBoundNodeID) } - } - newWF.Status.StoredTemplates = make(map[string]wfv1.Template) - for id, tmpl := range wf.Status.StoredTemplates { - newWF.Status.StoredTemplates[id] = tmpl + wfNode := newWf.Status.Nodes[id] + wfNode.Children = newChildren + wfNode.OutboundNodes = newOutboundNodes + newWf.Status.Nodes.Set(id, *wfNode.DeepCopy()) } - return newWF, podsToDelete, nil + return newWf, podsToDelete, nil } func resetNode(node wfv1.NodeStatus) wfv1.NodeStatus { @@ -1076,25 +1409,13 @@ func getNodeIDsToReset(restartSuccessful bool, nodeFieldSelector string, nodes w selector, err := 
fields.ParseSelector(nodeFieldSelector) if err != nil { return nil, err - } else { - for _, node := range nodes { - if SelectorMatchesNode(selector, node) { - // traverse all children of the node - var queue []string - queue = append(queue, node.ID) - - for len(queue) > 0 { - childNode := queue[0] - // if the child isn't already in nodeIDsToReset then we add it and traverse its children - if _, present := nodeIDsToReset[childNode]; !present { - nodeIDsToReset[childNode] = true - queue = append(queue, nodes[childNode].Children...) - } - queue = queue[1:] - } - } + } + for _, node := range nodes { + if SelectorMatchesNode(selector, node) { + nodeIDsToReset[node.ID] = true } } + return nodeIDsToReset, nil } diff --git a/workflow/util/util_test.go b/workflow/util/util_test.go index 67b1b2e84374..6de73e0d1b71 100644 --- a/workflow/util/util_test.go +++ b/workflow/util/util_test.go @@ -7,7 +7,6 @@ import ( "path/filepath" "strings" "testing" - "time" "github.com/go-jose/go-jose/v3/jwt" "github.com/stretchr/testify/assert" @@ -1026,51 +1025,6 @@ func TestRetryExitHandler(t *testing.T) { func TestFormulateRetryWorkflow(t *testing.T) { ctx := context.Background() wfClient := argofake.NewSimpleClientset().ArgoprojV1alpha1().Workflows("my-ns") - createdTime := metav1.Time{Time: time.Now().Add(-1 * time.Second).UTC()} - finishedTime := metav1.Time{Time: createdTime.Add(time.Second * 2)} - t.Run("Steps", func(t *testing.T) { - wf := &wfv1.Workflow{ - ObjectMeta: metav1.ObjectMeta{ - Name: "my-steps", - Labels: map[string]string{ - common.LabelKeyCompleted: "true", - common.LabelKeyWorkflowArchivingStatus: "Pending", - }, - }, - Status: wfv1.WorkflowStatus{ - Phase: wfv1.WorkflowFailed, - StartedAt: createdTime, - FinishedAt: finishedTime, - Nodes: map[string]wfv1.NodeStatus{ - "failed-node": {Name: "failed-node", StartedAt: createdTime, FinishedAt: finishedTime, Phase: wfv1.NodeFailed, Message: "failed"}, - "succeeded-node": {Name: "succeeded-node", StartedAt: createdTime, 
FinishedAt: finishedTime, Phase: wfv1.NodeSucceeded, Message: "succeeded"}}, - }, - } - _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) - require.NoError(t, err) - wf, _, err = FormulateRetryWorkflow(ctx, wf, false, "", nil) - require.NoError(t, err) - assert.Equal(t, wfv1.WorkflowRunning, wf.Status.Phase) - assert.Equal(t, metav1.Time{}, wf.Status.FinishedAt) - assert.True(t, wf.Status.StartedAt.After(createdTime.Time)) - assert.NotContains(t, wf.Labels, common.LabelKeyCompleted) - assert.NotContains(t, wf.Labels, common.LabelKeyWorkflowArchivingStatus) - for _, node := range wf.Status.Nodes { - switch node.Phase { - case wfv1.NodeSucceeded: - assert.Equal(t, "succeeded", node.Message) - assert.Equal(t, wfv1.NodeSucceeded, node.Phase) - assert.Equal(t, createdTime, node.StartedAt) - assert.Equal(t, finishedTime, node.FinishedAt) - case wfv1.NodeFailed: - assert.Equal(t, "", node.Message) - assert.Equal(t, wfv1.NodeRunning, node.Phase) - assert.Equal(t, metav1.Time{}, node.FinishedAt) - assert.True(t, node.StartedAt.After(createdTime.Time)) - } - } - - }) t.Run("DAG", func(t *testing.T) { wf := &wfv1.Workflow{ ObjectMeta: metav1.ObjectMeta{ @@ -1080,15 +1034,14 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "": {Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeTaskGroup}}, + "my-dag": {Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Name: "my-dag", ID: "my-dag"}}, }, } _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) require.NoError(t, err) wf, _, err = FormulateRetryWorkflow(ctx, wf, false, "", nil) require.NoError(t, err) - require.Len(t, wf.Status.Nodes, 1) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes[""].Phase) + assert.Len(t, wf.Status.Nodes, 1) }) t.Run("Skipped and Suspended Nodes", func(t *testing.T) { wf := &wfv1.Workflow{ @@ -1099,12 +1052,12 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: 
wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "entrypoint": {ID: "entrypoint", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"suspended", "skipped"}}, + "wf-with-skipped-and-suspended-nodes": {ID: "wf-with-skipped-and-suspended-nodes", Name: "wf-with-skipped-and-suspended-nodes", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"suspended", "skipped"}}, "suspended": { ID: "suspended", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeSuspend, - BoundaryID: "entrypoint", + BoundaryID: "wf-with-skipped-and-suspended-nodes", Children: []string{"child"}, Outputs: &wfv1.Outputs{Parameters: []wfv1.Parameter{{ Name: "param-1", @@ -1119,14 +1072,8 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) wf, _, err = FormulateRetryWorkflow(ctx, wf, true, "id=suspended", nil) require.NoError(t, err) - require.Len(t, wf.Status.Nodes, 3) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["entrypoint"].Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["suspended"].Phase) - assert.Equal(t, wfv1.Parameter{ - Name: "param-1", - Value: nil, - ValueFrom: &wfv1.ValueFrom{Supplied: &wfv1.SuppliedValueFrom{}}, - }, wf.Status.Nodes["suspended"].Outputs.Parameters[0]) + require.Len(t, wf.Status.Nodes, 2) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["wf-with-skipped-and-suspended-nodes"].Phase) assert.Equal(t, wfv1.NodeSkipped, wf.Status.Nodes["skipped"].Phase) }) t.Run("Nested DAG with Non-group Node Selected", func(t *testing.T) { @@ -1138,7 +1085,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "my-nested-dag-1": {ID: "my-nested-dag-1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, + "my-nested-dag-1": {ID: "my-nested-dag-1", Name: "my-nested-dag-1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, "1": {ID: "1", Phase: 
wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "my-nested-dag-1", Children: []string{"2", "4"}}, "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, @@ -1151,7 +1098,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #3, #4 are deleted and will be recreated so only 3 nodes left in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 3) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["my-nested-dag-1"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["my-nested-dag-1"].Phase) // The parent group nodes should be running. assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["2"].Phase) @@ -1165,9 +1112,9 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "my-nested-dag-2": {ID: "my-nested-dag-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, + "my-nested-dag-2": {ID: "my-nested-dag-2", Name: "my-nested-dag-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "my-nested-dag-2", Children: []string{"2", "4"}}, - "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, + "2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1", Children: []string{"3"}}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, "4": {ID: "4", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, }, @@ -1178,12 +1125,10 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #2, #3, and #4 are deleted and will be recreated so only 2 nodes left 
in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 4) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["my-nested-dag-2"].Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["my-nested-dag-2"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["2"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["3"].Phase) - assert.Equal(t, "", string(wf.Status.Nodes["4"].Phase)) - }) t.Run("OverrideParams", func(t *testing.T) { wf := &wfv1.Workflow{ @@ -1199,7 +1144,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, + "override-param-wf": {ID: "override-param-wf", Name: "override-param-wf", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG}, }}, } wf, _, err := FormulateRetryWorkflow(context.Background(), wf, false, "", []string{"message=modified"}) @@ -1222,7 +1167,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, + "override-param-wf": {ID: "override-param-wf", Name: "override-param-wf", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup}, }, StoredWorkflowSpec: &wfv1.WorkflowSpec{Arguments: wfv1.Arguments{ Parameters: []wfv1.Parameter{ @@ -1300,11 +1245,11 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowSucceeded, Nodes: map[string]wfv1.NodeStatus{ - "successful-workflow-2": {ID: "successful-workflow-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, Children: []string{"1"}}, - "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "successful-workflow-2", Children: []string{"2", "4"}}, - 
"2": {ID: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, - "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, - "4": {ID: "4", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, + "successful-workflow-2": {ID: "successful-workflow-2", Name: "successful-workflow-2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeDAG, Children: []string{"1"}}, + "1": {ID: "1", Name: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "successful-workflow-2", Children: []string{"2", "4"}}, + "2": {ID: "2", Name: "2", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypeTaskGroup, BoundaryID: "1", Children: []string{"3"}}, + "3": {ID: "3", Name: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "2"}, + "4": {ID: "4", Name: "4", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "1"}}, }, } _, err := wfClient.Create(ctx, wf, metav1.CreateOptions{}) @@ -1313,7 +1258,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.NoError(t, err) // Node #4 is deleted and will be recreated so only 4 nodes left in wf.Status.Nodes require.Len(t, wf.Status.Nodes, 4) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["successful-workflow-2"].Phase) + assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["successful-workflow-2"].Phase) // The parent group nodes should be running. 
assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["2"].Phase) @@ -1329,7 +1274,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "continue-on-failed-workflow": {ID: "continue-on-failed-workflow", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, + "continue-on-failed-workflow": {ID: "continue-on-failed-workflow", Name: "continue-on-failed-workflow", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Children: []string{"2", "4"}, Name: "node1"}, "2": {ID: "2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Children: []string{"3"}, Name: "node2"}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow", Name: "node3"}, @@ -1342,9 +1287,8 @@ func TestFormulateRetryWorkflow(t *testing.T) { wf, podsToDelete, err := FormulateRetryWorkflow(ctx, wf, false, "", nil) require.NoError(t, err) require.Len(t, wf.Status.Nodes, 4) - assert.Equal(t, wfv1.NodeFailed, wf.Status.Nodes["2"].Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["3"].Phase) - assert.Len(t, podsToDelete, 2) + assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) + assert.Len(t, podsToDelete, 1) }) t.Run("Retry continue on failed workflow with restartSuccessful and nodeFieldSelector", func(t *testing.T) { @@ -1356,7 +1300,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { Status: wfv1.WorkflowStatus{ Phase: wfv1.WorkflowFailed, Nodes: map[string]wfv1.NodeStatus{ - "continue-on-failed-workflow-2": {ID: "continue-on-failed-workflow-2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, 
OutboundNodes: []string{"3", "5"}}, + "continue-on-failed-workflow-2": {ID: "continue-on-failed-workflow-2", Name: "continue-on-failed-workflow-2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypeDAG, Children: []string{"1"}, OutboundNodes: []string{"3", "5"}}, "1": {ID: "1", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Children: []string{"2", "4"}, Name: "node1"}, "2": {ID: "2", Phase: wfv1.NodeFailed, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Children: []string{"3"}, Name: "node2"}, "3": {ID: "3", Phase: wfv1.NodeSucceeded, Type: wfv1.NodeTypePod, BoundaryID: "continue-on-failed-workflow-2", Name: "node3"}, @@ -1371,7 +1315,7 @@ func TestFormulateRetryWorkflow(t *testing.T) { require.Len(t, wf.Status.Nodes, 2) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes["1"].Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["continue-on-failed-workflow-2"].Phase) - assert.Len(t, podsToDelete, 4) + assert.Len(t, podsToDelete, 3) }) } @@ -1916,24 +1860,23 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { wf, podsToDelete, err := FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step1", nil) require.NoError(t, err) assert.Len(t, wf.Status.Nodes, 1) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Len(t, podsToDelete, 6) // Retry top individual suspend node wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 3) + require.Len(t, wf.Status.Nodes, 2) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, 
wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) assert.Len(t, podsToDelete, 5) // Retry the starting on first DAG in one of the branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + + assert.Len(t, wf.Status.Nodes, 9) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1944,17 +1887,13 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - // The nodes in the retrying branch are reset - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the starting on second DAG in one of the branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, 
true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + assert.Len(t, wf.Status.Nodes, 10) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1967,15 +1906,13 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) // The nodes in the retrying branch are reset assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the first individual node (suspended node) connecting to the second DAG in one of the branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 12) + assert.Len(t, wf.Status.Nodes, 11) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -1989,7 +1926,6 @@ func 
TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { // The nodes in the retrying branch are reset (parent DAGs are marked as running) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Len(t, podsToDelete, 3) // Retry the second individual node (pod node) connecting to the second DAG in one of the branches @@ -2041,7 +1977,7 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 15) + assert.Len(t, wf.Status.Nodes, 14) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) @@ -2054,34 +1990,20 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) // The nodes in the retrying branch are reset (parent DAGs are marked as running) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) + 
assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) // The suspended node remains succeeded assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) assert.Len(t, podsToDelete, 1) // Retry the node that connects the two branches wf = wfv1.MustUnmarshalWorkflow(retryWorkflowWithNestedDAGsWithSuspendNodes) wf, podsToDelete, err = FormulateRetryWorkflow(ctx, wf, true, "name=fail-two-nested-dag-suspend.dag1-step4", nil) require.NoError(t, err) - assert.Len(t, wf.Status.Nodes, 16) + assert.Len(t, wf.Status.Nodes, 15) assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) - // All nodes in two branches remains succeeded - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, 
wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) - assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step4").Phase) assert.Len(t, podsToDelete, 1) // Retry the last node (failing node) @@ -2092,18 +2014,1900 @@ func TestRetryWorkflowWithNestedDAGsWithSuspendNodes(t *testing.T) { assert.Equal(t, wfv1.NodeRunning, wf.Status.Nodes["fail-two-nested-dag-suspend"].Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step1").Phase) assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step2").Phase) - // All nodes in two branches remains succeeded - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, 
wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle1.dag2-branch1-step2.dag3-step3").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step1").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step1.dag3-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step3-middle2.dag2-branch2-step2").Phase) - assert.Equal(t, wfv1.NodeSucceeded, wf.Status.Nodes.FindByName("fail-two-nested-dag-suspend.dag1-step4").Phase) assert.Len(t, podsToDelete, 1) } + +const stepsRetryFormulate = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-09-19T02:41:51Z" + generateName: steps- + generation: 29 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + name: steps-4k5vn + namespace: argo + resourceVersion: "50080" + uid: 0e7608c7-4555-46a4-8697-3be04eec428b +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: hello-hello-hello + podSpecPatch: | + terminationGracePeriodSeconds: 3 + 
templates: + - inputs: {} + metadata: {} + name: hello-hello-hello + outputs: {} + steps: + - - arguments: + parameters: + - name: message + value: hello1 + name: hello1 + template: whalesay + - - arguments: + parameters: + - name: message + value: hello2a + name: hello2a + template: whalesay + - arguments: + parameters: + - name: message + value: hello2b + name: hello2b + template: whalesay + - container: + args: + - '{{inputs.parameters.message}}' + command: + - cowsay + image: docker/whalesay + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: whalesay + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-09-19T02:43:44Z" + nodes: + steps-4k5vn: + children: + - steps-4k5vn-899690889 + displayName: steps-4k5vn + finishedAt: "2024-09-19T02:43:44Z" + id: steps-4k5vn + name: steps-4k5vn + outboundNodes: + - steps-4k5vn-2627784879 + - steps-4k5vn-2644562498 + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:43:30Z" + templateName: hello-hello-hello + templateScope: local/steps-4k5vn + type: Steps + steps-4k5vn-899690889: + boundaryID: steps-4k5vn + children: + - steps-4k5vn-1044844302 + displayName: '[0]' + finishedAt: "2024-09-19T02:42:12Z" + id: steps-4k5vn-899690889 + name: steps-4k5vn[0] + nodeFlag: {} + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:41:51Z" + templateScope: local/steps-4k5vn + type: StepGroup + steps-4k5vn-1044844302: + boundaryID: steps-4k5vn + children: + - 
steps-4k5vn-4053927188 + displayName: hello1 + finishedAt: "2024-09-19T02:42:09Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-1044844302 + inputs: + parameters: + - name: message + value: hello1 + name: steps-4k5vn[0].hello1 + outputs: + artifacts: + - name: main-logs + s3: + key: steps-4k5vn/steps-4k5vn-whalesay-1044844302/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 1 + memory: 11 + startedAt: "2024-09-19T02:41:51Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-2627784879: + boundaryID: steps-4k5vn + displayName: hello2a + finishedAt: "2024-09-19T02:43:39Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-2627784879 + inputs: + parameters: + - name: message + value: hello2a + name: steps-4k5vn[1].hello2a + outputs: + artifacts: + - name: main-logs + s3: + key: steps-4k5vn/steps-4k5vn-whalesay-2627784879/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 5 + startedAt: "2024-09-19T02:43:30Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-2644562498: + boundaryID: steps-4k5vn + displayName: hello2b + finishedAt: "2024-09-19T02:43:41Z" + hostNodeName: k3d-k3s-default-server-0 + id: steps-4k5vn-2644562498 + inputs: + parameters: + - name: message + value: hello2b + name: steps-4k5vn[1].hello2b + outputs: + artifacts: + - name: main-logs + s3: + key: steps-4k5vn/steps-4k5vn-whalesay-2644562498/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2024-09-19T02:43:30Z" + templateName: whalesay + templateScope: local/steps-4k5vn + type: Pod + steps-4k5vn-4053927188: + boundaryID: steps-4k5vn + children: + - steps-4k5vn-2627784879 + - steps-4k5vn-2644562498 + displayName: '[1]' + finishedAt: "2024-09-19T02:43:44Z" + id: steps-4k5vn-4053927188 + name: steps-4k5vn[1] + nodeFlag: {} + phase: Succeeded + progress: 2/2 + 
resourcesDuration: + cpu: 0 + memory: 11 + startedAt: "2024-09-19T02:43:30Z" + templateScope: local/steps-4k5vn + type: StepGroup + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-09-19T02:43:30Z" + taskResultsCompletionStatus: + steps-4k5vn-1044844302: true + steps-4k5vn-2627784879: true + steps-4k5vn-2644562498: true + +` + +func TestStepsRetryWorkflow(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(stepsRetryFormulate) + selectorStr := "id=steps-4k5vn-2627784879" + + running := map[string]bool{ + "steps-4k5vn-4053927188": true, + "steps-4k5vn": true, + } + + deleted := map[string]bool{ + "steps-4k5vn-2627784879": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, selectorStr, []string{}) + require.NoError(err) + assert.Len(podsToDelete, 1) + assert.Len(newWf.Status.Nodes, 5) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +} + +func TestDagConversion(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(stepsRetryFormulate) + + nodes, err := newWorkflowsDag(wf) + require.NoError(err) + assert.Len(nodes, len(wf.Status.Nodes)) + + numNilParent := 0 + for _, n := range nodes { + if n.parent == nil { + numNilParent++ + } + } + assert.Equal(1, numNilParent) +} + +const dagDiamondRetry = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-01T04:27:23Z" + generateName: dag-diamond- + 
generation: 16 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + name: dag-diamond-82q7s + namespace: argo + resourceVersion: "4633" + uid: dd3d2674-43d8-446a-afdf-17ec95afade2 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: diamond + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - dag: + tasks: + - arguments: + parameters: + - name: message + value: A + name: A + template: echo + - arguments: + parameters: + - name: message + value: B + depends: A + name: B + template: echo + - arguments: + parameters: + - name: message + value: C + depends: A + name: C + template: echo + - arguments: + parameters: + - name: message + value: D + depends: B && C + name: D + template: echo + inputs: {} + metadata: {} + name: diamond + outputs: {} + - container: + command: + - echo + - '{{inputs.parameters.message}}' + image: alpine:3.7 + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: echo + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-01T04:27:42Z" + nodes: + dag-diamond-82q7s: + children: + - dag-diamond-82q7s-1310542453 + displayName: dag-diamond-82q7s + finishedAt: "2024-10-01T04:27:42Z" + id: dag-diamond-82q7s + name: dag-diamond-82q7s + outboundNodes: + - dag-diamond-82q7s-1226654358 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 8 + startedAt: "2024-10-01T04:27:23Z" + templateName: diamond + templateScope: local/dag-diamond-82q7s + type: DAG + dag-diamond-82q7s-1226654358: + 
boundaryID: dag-diamond-82q7s + displayName: D + finishedAt: "2024-10-01T04:27:39Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1226654358 + inputs: + parameters: + - name: message + value: D + name: dag-diamond-82q7s.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1226654358/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:36Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1260209596: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1226654358 + displayName: B + finishedAt: "2024-10-01T04:27:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1260209596 + inputs: + parameters: + - name: message + value: B + name: dag-diamond-82q7s.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1260209596/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:30Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1276987215: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1226654358 + displayName: C + finishedAt: "2024-10-01T04:27:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1276987215 + inputs: + parameters: + - name: message + value: C + name: dag-diamond-82q7s.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1276987215/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:30Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + dag-diamond-82q7s-1310542453: + boundaryID: dag-diamond-82q7s + children: + - dag-diamond-82q7s-1260209596 + - dag-diamond-82q7s-1276987215 + 
displayName: A + finishedAt: "2024-10-01T04:27:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-diamond-82q7s-1310542453 + inputs: + parameters: + - name: message + value: A + name: dag-diamond-82q7s.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-diamond-82q7s/dag-diamond-82q7s-echo-1310542453/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-01T04:27:23Z" + templateName: echo + templateScope: local/dag-diamond-82q7s + type: Pod + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 8 + startedAt: "2024-10-01T04:27:23Z" + taskResultsCompletionStatus: + dag-diamond-82q7s-1226654358: true + dag-diamond-82q7s-1260209596: true + dag-diamond-82q7s-1276987215: true + dag-diamond-82q7s-1310542453: true + +` + +func TestDAGDiamondRetryWorkflow(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(dagDiamondRetry) + selectorStr := "id=dag-diamond-82q7s-1260209596" + + running := map[string]bool{ + "dag-diamond-82q7s": true, + } + + deleted := map[string]bool{ + "dag-diamond-82q7s-1226654358": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, selectorStr, []string{}) + + require.NoError(err) + assert.Len(podsToDelete, 2) + assert.Len(newWf.Status.Nodes, 3) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } +} + +const onExitWorkflowRetry = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: 
"2024-10-02T05:54:00Z" + generateName: work-avoidance- + generation: 25 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + workflows.argoproj.io/resubmitted-from-workflow: work-avoidance-xghlj + name: work-avoidance-trkkq + namespace: argo + resourceVersion: "2661" + uid: 0271624e-0096-428a-81da-643dbbd69440 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: main + onExit: save-markers + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - inputs: {} + metadata: {} + name: main + outputs: {} + steps: + - - arguments: {} + name: load-markers + template: load-markers + - - arguments: + parameters: + - name: num + value: '{{item}}' + name: echo + template: echo + withSequence: + count: "3" + - container: + command: + - mkdir + - -p + - /work/markers + image: docker/whalesay:latest + name: "" + resources: {} + volumeMounts: + - mountPath: /work + name: work + inputs: + artifacts: + - name: markers + optional: true + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + metadata: {} + name: load-markers + outputs: {} + - inputs: + parameters: + - name: num + metadata: {} + name: echo + outputs: {} + script: + command: + - bash + - -eux + image: docker/whalesay:latest + name: "" + resources: {} + source: | + marker=/work/markers/$(date +%Y-%m-%d)-echo-{{inputs.parameters.num}} + if [ -e ${marker} ]; then + echo "work already done" + exit 0 + fi + echo "working very hard" + # toss a virtual coin and exit 1 if 1 + if [ $(($(($RANDOM%10))%2)) -eq 1 ]; then + echo "oh no!" 
+ exit 1 + fi + touch ${marker} + volumeMounts: + - mountPath: /work + name: work + - container: + command: + - "true" + image: docker/whalesay:latest + name: "" + resources: {} + volumeMounts: + - mountPath: /work + name: work + inputs: {} + metadata: {} + name: save-markers + outputs: + artifacts: + - name: markers + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + volumeClaimTemplates: + - metadata: + creationTimestamp: null + name: work + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 10Mi + status: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-02T05:54:41Z" + nodes: + work-avoidance-trkkq: + children: + - work-avoidance-trkkq-88427725 + displayName: work-avoidance-trkkq + finishedAt: "2024-10-02T05:54:30Z" + id: work-avoidance-trkkq + name: work-avoidance-trkkq + outboundNodes: + - work-avoidance-trkkq-4180283560 + - work-avoidance-trkkq-605537244 + - work-avoidance-trkkq-4183398008 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-10-02T05:54:00Z" + templateName: main + templateScope: local/work-avoidance-trkkq + type: Steps + work-avoidance-trkkq-21464344: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-4180283560 + - work-avoidance-trkkq-605537244 + - work-avoidance-trkkq-4183398008 + displayName: '[1]' + finishedAt: "2024-10-02T05:54:30Z" + id: 
work-avoidance-trkkq-21464344 + name: work-avoidance-trkkq[1] + nodeFlag: {} + phase: Succeeded + progress: 3/3 + resourcesDuration: + cpu: 1 + memory: 18 + startedAt: "2024-10-02T05:54:14Z" + templateScope: local/work-avoidance-trkkq + type: StepGroup + work-avoidance-trkkq-88427725: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-3329426915 + displayName: '[0]' + finishedAt: "2024-10-02T05:54:14Z" + id: work-avoidance-trkkq-88427725 + name: work-avoidance-trkkq[0] + nodeFlag: {} + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 1 + memory: 22 + startedAt: "2024-10-02T05:54:00Z" + templateScope: local/work-avoidance-trkkq + type: StepGroup + work-avoidance-trkkq-605537244: + boundaryID: work-avoidance-trkkq + displayName: echo(1:1) + finishedAt: "2024-10-02T05:54:24Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-605537244 + inputs: + parameters: + - name: num + value: "1" + name: work-avoidance-trkkq[1].echo(1:1) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-605537244/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 6 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-1461956272: + displayName: work-avoidance-trkkq.onExit + finishedAt: "2024-10-02T05:54:38Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-1461956272 + name: work-avoidance-trkkq.onExit + nodeFlag: + hooked: true + outputs: + artifacts: + - name: markers + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-save-markers-1461956272/main.log + exitCode: "0" + phase: Succeeded + 
progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 5 + startedAt: "2024-10-02T05:54:30Z" + templateName: save-markers + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-3329426915: + boundaryID: work-avoidance-trkkq + children: + - work-avoidance-trkkq-21464344 + displayName: load-markers + finishedAt: "2024-10-02T05:54:12Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-3329426915 + inputs: + artifacts: + - name: markers + optional: true + path: /work/markers + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + key: work-avoidance-markers + secretKeySecret: + key: secretkey + name: my-minio-cred + name: work-avoidance-trkkq[0].load-markers + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-load-markers-3329426915/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-02T05:54:00Z" + templateName: load-markers + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-4180283560: + boundaryID: work-avoidance-trkkq + displayName: echo(0:0) + finishedAt: "2024-10-02T05:54:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-4180283560 + inputs: + parameters: + - name: num + value: "0" + name: work-avoidance-trkkq[1].echo(0:0) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-4180283560/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 1 + memory: 8 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: local/work-avoidance-trkkq + type: Pod + work-avoidance-trkkq-4183398008: + boundaryID: work-avoidance-trkkq + displayName: echo(2:2) + finishedAt: "2024-10-02T05:54:21Z" + hostNodeName: k3d-k3s-default-server-0 + id: work-avoidance-trkkq-4183398008 + inputs: + parameters: + - name: 
num + value: "2" + name: work-avoidance-trkkq[1].echo(2:2) + outputs: + artifacts: + - name: main-logs + s3: + key: work-avoidance-trkkq/work-avoidance-trkkq-echo-4183398008/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-02T05:54:14Z" + templateName: echo + templateScope: local/work-avoidance-trkkq + type: Pod + phase: Succeeded + progress: 5/5 + resourcesDuration: + cpu: 1 + memory: 27 + startedAt: "2024-10-02T05:54:00Z" + taskResultsCompletionStatus: + work-avoidance-trkkq-605537244: true + work-avoidance-trkkq-1461956272: true + work-avoidance-trkkq-3329426915: true + work-avoidance-trkkq-4180283560: true + work-avoidance-trkkq-4183398008: true +` + +func TestOnExitWorkflowRetry(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + wf := wfv1.MustUnmarshalWorkflow(onExitWorkflowRetry) + running := map[string]bool{ + "work-avoidance-trkkq-21464344": true, + "work-avoidance-trkkq": true, + } + deleted := map[string]bool{ + "work-avoidance-trkkq-1461956272": true, + "work-avoidance-trkkq-4183398008": true, + } + + succeeded := make(map[string]bool) + + for _, node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + + selectorStr := "id=work-avoidance-trkkq-4183398008" + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, selectorStr, []string{}) + require.NoError(err) + assert.Len(newWf.Status.Nodes, 6) + assert.Len(podsToDelete, 2) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +} + +const onExitWorkflow = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: 
"2024-10-14T09:21:14Z" + generation: 10 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Failed + name: retry-workflow-with-failed-exit-handler + namespace: argo + resourceVersion: "13510" + uid: f72bf6f7-3d8c-4b31-893b-ef03d4718959 +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: hello + onExit: exit-handler + podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - container: + args: + - echo hello + command: + - sh + - -c + image: alpine:3.18 + name: "" + resources: {} + inputs: {} + metadata: {} + name: hello + outputs: {} + - container: + args: + - exit 1 + command: + - sh + - -c + image: alpine:3.18 + name: "" + resources: {} + inputs: {} + metadata: {} + name: exit-handler + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-14T09:21:27Z" + message: Error (exit code 1) + nodes: + retry-workflow-with-failed-exit-handler: + displayName: retry-workflow-with-failed-exit-handler + finishedAt: "2024-10-14T09:21:18Z" + hostNodeName: k3d-k3s-default-server-0 + id: retry-workflow-with-failed-exit-handler + name: retry-workflow-with-failed-exit-handler + outputs: + artifacts: + - name: main-logs + s3: + key: retry-workflow-with-failed-exit-handler/retry-workflow-with-failed-exit-handler/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-14T09:21:14Z" + templateName: hello + templateScope: local/retry-workflow-with-failed-exit-handler + type: Pod + retry-workflow-with-failed-exit-handler-512308683: + 
displayName: retry-workflow-with-failed-exit-handler.onExit + finishedAt: "2024-10-14T09:21:24Z" + hostNodeName: k3d-k3s-default-server-0 + id: retry-workflow-with-failed-exit-handler-512308683 + message: Error (exit code 1) + name: retry-workflow-with-failed-exit-handler.onExit + nodeFlag: + hooked: true + outputs: + artifacts: + - name: main-logs + s3: + key: retry-workflow-with-failed-exit-handler/retry-workflow-with-failed-exit-handler-exit-handler-512308683/main.log + exitCode: "1" + phase: Failed + progress: 0/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-14T09:21:21Z" + templateName: exit-handler + templateScope: local/retry-workflow-with-failed-exit-handler + type: Pod + phase: Failed + progress: 1/2 + resourcesDuration: + cpu: 0 + memory: 4 + startedAt: "2024-10-14T09:21:14Z" + taskResultsCompletionStatus: + retry-workflow-with-failed-exit-handler: true + retry-workflow-with-failed-exit-handler-512308683: true +` + +func TestOnExitWorkflow(t *testing.T) { + require := require.New(t) + assert := assert.New(t) + wf := wfv1.MustUnmarshalWorkflow(onExitWorkflow) + + newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, false, "", []string{}) + require.NoError(err) + assert.Len(podsToDelete, 1) + assert.Len(newWf.Status.Nodes, 1) + assert.Equal(wfv1.NodeSucceeded, newWf.Status.Nodes["retry-workflow-with-failed-exit-handler"].Phase) + +} + +const nestedDAG = `apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + annotations: + workflows.argoproj.io/pod-name-format: v2 + creationTimestamp: "2024-10-16T04:12:51Z" + generateName: dag-nested- + generation: 39 + labels: + workflows.argoproj.io/completed: "true" + workflows.argoproj.io/phase: Succeeded + workflows.argoproj.io/resubmitted-from-workflow: dag-nested-52l5t + name: dag-nested-zxlc2 + namespace: argo + resourceVersion: "11348" + uid: 402ed1f0-0dbf-42fd-92b8-b7858ba2979c +spec: + activeDeadlineSeconds: 300 + arguments: {} + entrypoint: diamond + 
podSpecPatch: | + terminationGracePeriodSeconds: 3 + templates: + - container: + command: + - echo + - '{{inputs.parameters.message}}' + image: alpine:3.7 + name: "" + resources: {} + inputs: + parameters: + - name: message + metadata: {} + name: echo + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: message + value: A + name: A + template: nested-diamond + - arguments: + parameters: + - name: message + value: B + depends: A + name: B + template: nested-diamond + - arguments: + parameters: + - name: message + value: C + depends: A + name: C + template: nested-diamond + - arguments: + parameters: + - name: message + value: D + depends: B && C + name: D + template: nested-diamond + inputs: {} + metadata: {} + name: diamond + outputs: {} + - dag: + tasks: + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}A' + name: A + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}B' + depends: A + name: B + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}C' + depends: A + name: C + template: echo + - arguments: + parameters: + - name: message + value: '{{inputs.parameters.message}}D' + depends: B && C + name: D + template: echo + inputs: + parameters: + - name: message + metadata: {} + name: nested-diamond + outputs: {} +status: + artifactGCStatus: + notSpecified: true + artifactRepositoryRef: + artifactRepository: + archiveLogs: true + s3: + accessKeySecret: + key: accesskey + name: my-minio-cred + bucket: my-bucket + endpoint: minio:9000 + insecure: true + secretKeySecret: + key: secretkey + name: my-minio-cred + configMap: artifact-repositories + key: default-v1 + namespace: argo + conditions: + - status: "False" + type: PodRunning + - status: "True" + type: Completed + finishedAt: "2024-10-16T04:13:49Z" + nodes: + dag-nested-zxlc2: + children: + - dag-nested-zxlc2-1970677234 + displayName: dag-nested-zxlc2 + 
finishedAt: "2024-10-16T04:13:49Z" + id: dag-nested-zxlc2 + name: dag-nested-zxlc2 + outboundNodes: + - dag-nested-zxlc2-644277987 + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + templateName: diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-644277987: + boundaryID: dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:46Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-644277987 + inputs: + parameters: + - name: message + value: DD + name: dag-nested-zxlc2.D.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-644277987/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:43Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-694610844: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-744943701 + - dag-nested-zxlc2-728166082 + displayName: A + finishedAt: "2024-10-16T04:13:33Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-694610844 + inputs: + parameters: + - name: message + value: DA + name: dag-nested-zxlc2.D.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-694610844/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 1 + startedAt: "2024-10-16T04:13:30Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-725087280: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-1953899615 + - dag-nested-zxlc2-1937121996 + displayName: D + finishedAt: "2024-10-16T04:13:07Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-725087280 + inputs: + parameters: + - name: message + value: AD + name: dag-nested-zxlc2.A.D + outputs: + artifacts: + - name: main-logs 
+ s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-725087280/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:03Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-728166082: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-644277987 + displayName: C + finishedAt: "2024-10-16T04:13:40Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-728166082 + inputs: + parameters: + - name: message + value: DC + name: dag-nested-zxlc2.D.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-728166082/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:36Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-744943701: + boundaryID: dag-nested-zxlc2-1920344377 + children: + - dag-nested-zxlc2-644277987 + displayName: B + finishedAt: "2024-10-16T04:13:40Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-744943701 + inputs: + parameters: + - name: message + value: DB + name: dag-nested-zxlc2.D.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-744943701/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:36Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-808975375: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-825752994 + - dag-nested-zxlc2-842530613 + displayName: A + finishedAt: "2024-10-16T04:12:54Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-808975375 + inputs: + parameters: + - name: message + value: AA + name: dag-nested-zxlc2.A.A + outputs: + artifacts: + - name: main-logs + s3: + key: 
dag-nested-zxlc2/dag-nested-zxlc2-echo-808975375/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 1 + startedAt: "2024-10-16T04:12:51Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-825752994: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-725087280 + displayName: B + finishedAt: "2024-10-16T04:13:00Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-825752994 + inputs: + parameters: + - name: message + value: AB + name: dag-nested-zxlc2.A.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-825752994/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:12:57Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-842530613: + boundaryID: dag-nested-zxlc2-1970677234 + children: + - dag-nested-zxlc2-725087280 + displayName: C + finishedAt: "2024-10-16T04:13:00Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-842530613 + inputs: + parameters: + - name: message + value: AC + name: dag-nested-zxlc2.A.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-842530613/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:12:57Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-903321510: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-903321510 + inputs: + parameters: + - name: message + value: CD + name: dag-nested-zxlc2.C.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-903321510/main.log + exitCode: 
"0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:23Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-936876748: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-903321510 + displayName: B + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-936876748 + inputs: + parameters: + - name: message + value: CB + name: dag-nested-zxlc2.C.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-936876748/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-953654367: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-903321510 + displayName: C + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-953654367 + inputs: + parameters: + - name: message + value: CC + name: dag-nested-zxlc2.C.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-953654367/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-987209605: + boundaryID: dag-nested-zxlc2-1937121996 + children: + - dag-nested-zxlc2-936876748 + - dag-nested-zxlc2-953654367 + displayName: A + finishedAt: "2024-10-16T04:13:13Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-987209605 + inputs: + parameters: + - name: message + value: CA + name: dag-nested-zxlc2.C.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-987209605/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 
+ resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:10Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-1920344377: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-694610844 + displayName: D + finishedAt: "2024-10-16T04:13:49Z" + id: dag-nested-zxlc2-1920344377 + inputs: + parameters: + - name: message + value: D + name: dag-nested-zxlc2.D + outboundNodes: + - dag-nested-zxlc2-644277987 + phase: Succeeded + progress: 4/4 + resourcesDuration: + cpu: 0 + memory: 7 + startedAt: "2024-10-16T04:13:30Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1937121996: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-987209605 + displayName: C + finishedAt: "2024-10-16T04:13:30Z" + id: dag-nested-zxlc2-1937121996 + inputs: + parameters: + - name: message + value: C + name: dag-nested-zxlc2.C + outboundNodes: + - dag-nested-zxlc2-903321510 + phase: Succeeded + progress: 8/8 + resourcesDuration: + cpu: 0 + memory: 15 + startedAt: "2024-10-16T04:13:10Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1953899615: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-3753141766 + displayName: B + finishedAt: "2024-10-16T04:13:30Z" + id: dag-nested-zxlc2-1953899615 + inputs: + parameters: + - name: message + value: B + name: dag-nested-zxlc2.B + outboundNodes: + - dag-nested-zxlc2-3837029861 + phase: Succeeded + progress: 8/8 + resourcesDuration: + cpu: 0 + memory: 15 + startedAt: "2024-10-16T04:13:10Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-1970677234: + boundaryID: dag-nested-zxlc2 + children: + - dag-nested-zxlc2-808975375 + displayName: A + finishedAt: "2024-10-16T04:13:10Z" + id: dag-nested-zxlc2-1970677234 + inputs: + parameters: + - name: message + value: A + name: dag-nested-zxlc2.A + outboundNodes: + - 
dag-nested-zxlc2-725087280 + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + templateName: nested-diamond + templateScope: local/dag-nested-zxlc2 + type: DAG + dag-nested-zxlc2-3719586528: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-3837029861 + displayName: C + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3719586528 + inputs: + parameters: + - name: message + value: BC + name: dag-nested-zxlc2.B.C + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3719586528/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3736364147: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-3837029861 + displayName: B + finishedAt: "2024-10-16T04:13:20Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3736364147 + inputs: + parameters: + - name: message + value: BB + name: dag-nested-zxlc2.B.B + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3736364147/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:16Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3753141766: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-3736364147 + - dag-nested-zxlc2-3719586528 + displayName: A + finishedAt: "2024-10-16T04:13:13Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3753141766 + inputs: + parameters: + - name: message + value: BA + name: dag-nested-zxlc2.B.A + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3753141766/main.log + 
exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:10Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + dag-nested-zxlc2-3837029861: + boundaryID: dag-nested-zxlc2-1953899615 + children: + - dag-nested-zxlc2-1920344377 + displayName: D + finishedAt: "2024-10-16T04:13:27Z" + hostNodeName: k3d-k3s-default-server-0 + id: dag-nested-zxlc2-3837029861 + inputs: + parameters: + - name: message + value: BD + name: dag-nested-zxlc2.B.D + outputs: + artifacts: + - name: main-logs + s3: + key: dag-nested-zxlc2/dag-nested-zxlc2-echo-3837029861/main.log + exitCode: "0" + phase: Succeeded + progress: 1/1 + resourcesDuration: + cpu: 0 + memory: 2 + startedAt: "2024-10-16T04:13:23Z" + templateName: echo + templateScope: local/dag-nested-zxlc2 + type: Pod + phase: Succeeded + progress: 16/16 + resourcesDuration: + cpu: 0 + memory: 30 + startedAt: "2024-10-16T04:12:51Z" + taskResultsCompletionStatus: + dag-nested-zxlc2-644277987: true + dag-nested-zxlc2-694610844: true + dag-nested-zxlc2-725087280: true + dag-nested-zxlc2-728166082: true + dag-nested-zxlc2-744943701: true + dag-nested-zxlc2-808975375: true + dag-nested-zxlc2-825752994: true + dag-nested-zxlc2-842530613: true + dag-nested-zxlc2-903321510: true + dag-nested-zxlc2-936876748: true + dag-nested-zxlc2-953654367: true + dag-nested-zxlc2-987209605: true + dag-nested-zxlc2-3719586528: true + dag-nested-zxlc2-3736364147: true + dag-nested-zxlc2-3753141766: true + dag-nested-zxlc2-3837029861: true + +` + +/* TestNestedDAG verifies FormulateRetryWorkflow on a workflow of nested diamond DAGs: retrying from the node matched by the selector "id=dag-nested-zxlc2-744943701" must return 2 pods to delete, put the nodes in the running set back into the Running phase, and leave every other remaining node Succeeded. */ func TestNestedDAG(t *testing.T) { + require := require.New(t) + assert := assert.New(t) + wf := wfv1.MustUnmarshalWorkflow(nestedDAG) + + running := map[string]bool{ + "dag-nested-zxlc2-1920344377": true, + "dag-nested-zxlc2-1970677234 ": true, /* NOTE(review): this key has a trailing space, so it can never equal a node ID; node dag-nested-zxlc2-1970677234 therefore falls through to the succeeded set below and is asserted Succeeded rather than Running — confirm whether the space (or the entry itself) is intentional */ + "dag-nested-zxlc2": true, + } + deleted := map[string]bool{ + "dag-nested-zxlc2-744943701": true, + "dag-nested-zxlc2-644277987": true, + } + + succeeded := map[string]bool{} /* any node that is neither expected-running nor expected-deleted must remain Succeeded after the retry */ + + for _,
node := range wf.Status.Nodes { + _, inRunning := running[node.ID] + _, inDeleted := deleted[node.ID] + if !inRunning && !inDeleted { + succeeded[node.ID] = true + } + } + + /* retry from the selected node; the matched node and its descendant (the two entries of deleted) account for the 2 pods to delete */ newWf, podsToDelete, err := FormulateRetryWorkflow(context.Background(), wf, true, "id=dag-nested-zxlc2-744943701", []string{}) + require.NoError(err) + assert.Len(podsToDelete, 2) + + for _, node := range newWf.Status.Nodes { + if _, ok := running[node.ID]; ok { + assert.Equal(wfv1.NodeRunning, node.Phase) + } + if _, ok := succeeded[node.ID]; ok { + assert.Equal(wfv1.NodeSucceeded, node.Phase) + } + } + +}