feat: better prompt
[skip ci]
adityathebe committed Dec 24, 2024
1 parent 82c28ef commit f9265d4
Showing 5 changed files with 274 additions and 41 deletions.
36 changes: 31 additions & 5 deletions api/v1/playbook_actions.go
@@ -279,21 +279,47 @@ type AIActionClient struct {
// Supported: anthropic (default), ollama, openai.
Backend api.LLMBackend `json:"backend,omitempty"`

// Example: gpt-4o for openai, claude-3-haiku-20240307 for Anthropic, llama3.1:8b for Ollama ...
// Model name based on the backend chosen.
// Example: gpt-4o for openai, claude-3-5-sonnet-latest for Anthropic, llama3.1:8b for Ollama
Model string `json:"model,omitempty"`

// BaseURL or API url.
// Example: server URL for ollama or custom url for Anthropic if using a proxy, ...
// Example: server URL for ollama or custom url for Anthropic if using a proxy
APIURL string `json:"apiURL,omitempty"`
}
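
For a quick illustration of how the three client fields above line up across the supported backends, here is a hedged sketch (not part of the commit) using a local mirror type; the backend/model/apiURL values are taken from the comments above and the example playbooks below, and the real AIActionClient also carries an API-key reference that sits outside this hunk, so it is omitted here.

```go
package main

import "fmt"

// Local mirror of the AIActionClient fields shown in this hunk. The real type
// also has an API-key field (the example playbooks set `apiKey` from a secret),
// which is not part of the diff above and is therefore left out.
type aiClient struct {
	Backend string // anthropic (default), ollama, openai
	Model   string // model name depends on the chosen backend
	APIURL  string // only needed for Ollama, or for a proxy in front of Anthropic
}

func main() {
	examples := []aiClient{
		{Backend: "ollama", Model: "llama3.1:8b", APIURL: "http://localhost:11434"},
		{Backend: "anthropic", Model: "claude-3-5-sonnet-latest"}, // key supplied via secret
		{Backend: "openai", Model: "gpt-4o"},                      // key supplied via secret
	}
	for _, e := range examples {
		fmt.Printf("%-10s model=%-26s apiURL=%s\n", e.Backend, e.Model, e.APIURL)
	}
}
```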

type AIActionContext struct {
Config string `json:"config" yaml:"config" template:"true"`
Changes TimeMetadata `json:"changes,omitempty" yaml:"changes,omitempty"`
Analysis TimeMetadata `json:"analysis,omitempty" yaml:"analysis,omitempty"`
// The config id to operate on.
// If not provided, the playbook's config is used.
Config string `json:"config,omitempty" yaml:"config,omitempty" template:"true"`

// Select changes for the config to provide as an additional context to the AI model.
Changes TimeMetadata `json:"changes,omitempty" yaml:"changes,omitempty"`

// Select analysis for the config to provide as an additional context to the AI model.
Analysis TimeMetadata `json:"analysis,omitempty" yaml:"analysis,omitempty"`

// Select related configs to provide as an additional context to the AI model.
Relationships []AIActionRelationship `json:"relationships,omitempty" yaml:"relationships,omitempty"`
}

func (t AIActionContext) ShouldFetchConfigChanges() bool {
// if changes are being fetched from relationships, we don't have to query
// the changes for just the config alone.

if t.Changes.Since == "" {
return false
}

for _, r := range t.Relationships {
if r.Changes.Since != "" {
return false
}
}

return true
}
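
To make the new fetch-deduplication rule concrete, here is a small self-contained sketch (not part of the commit) that mirrors only the fields ShouldFetchConfigChanges reads and exercises it against the two shapes used in the example playbooks below. Treating TimeMetadata as a struct whose only relevant field is Since is an assumption inferred from the CRD schema in this commit (`since` is its single required property).

```go
package main

import "fmt"

// Minimal mirrors of the api/v1 types, reduced to what ShouldFetchConfigChanges
// actually inspects.
type TimeMetadata struct{ Since string }

type AIActionRelationship struct{ Changes TimeMetadata }

type AIActionContext struct {
	Changes       TimeMetadata
	Relationships []AIActionRelationship
}

// Same logic as the method above: fetch config-level changes only when a window
// is set on the config itself and no relationship requests its own changes.
func (t AIActionContext) ShouldFetchConfigChanges() bool {
	if t.Changes.Since == "" {
		return false
	}
	for _, r := range t.Relationships {
		if r.Changes.Since != "" {
			return false
		}
	}
	return true
}

func main() {
	// Only the config itself asks for changes, so they are queried directly.
	direct := AIActionContext{Changes: TimeMetadata{Since: "2d"}}
	fmt.Println(direct.ShouldFetchConfigChanges()) // true

	// A relationship also asks for changes (as in the second example playbook
	// below); the relationship traversal already brings those changes in, so
	// the standalone per-config query is skipped.
	related := AIActionContext{
		Changes:       TimeMetadata{Since: "2d"},
		Relationships: []AIActionRelationship{{Changes: TimeMetadata{Since: "12h"}}},
	}
	fmt.Println(related.ShouldFetchConfigChanges()) // false
}
```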

type AIAction struct {
AIActionClient `json:",inline" yaml:",inline"`
AIActionContext `json:",inline" yaml:",inline" template:"true"`
20 changes: 16 additions & 4 deletions config/crds/mission-control.flanksource.com_playbooks.yaml
@@ -45,6 +45,8 @@ spec:
ai:
properties:
analysis:
description: Select analysis for the config to provide as
an additional context to the AI model.
properties:
since:
type: string
@@ -97,29 +99,37 @@ spec:
apiURL:
description: |-
BaseURL or API url.
Example: server URL for ollama or custom url for Anthropic if using a proxy, ...
Example: server URL for ollama or custom url for Anthropic if using a proxy
type: string
backend:
description: |-
Optionally specify the LLM backend.
Supported: anthropic (default), ollama, openai.
type: string
changes:
description: Select changes for the config to provide as
an additional context to the AI model.
properties:
since:
type: string
required:
- since
type: object
config:
description: |-
The config id to operate on.
If not provided, the playbook's config is used.
type: string
model:
description: 'Example: gpt-4o for openai, claude-3-haiku-20240307
for Anthropic, llama3.1:8b for Ollama ...'
description: |-
Model name based on the backend chosen.
Example: gpt-4o for openai, claude-3-5-sonnet-latest for Anthropic, llama3.1:8b for Ollama
type: string
prompt:
type: string
relationships:
description: Select related configs to provide as an additional
context to the AI model.
items:
properties:
analysis:
@@ -137,13 +147,15 @@ spec:
- since
type: object
depth:
description: max depth to traverse the relationship.
Defaults to 3
type: integer
direction:
description: use incoming/outgoing/all relationships.
type: string
type: object
type: array
required:
- config
- prompt
type: object
azureDevopsPipeline:
@@ -17,13 +17,9 @@ spec:
actions:
- name: query
ai:
backend: openai
model: gpt-4o
apiKey:
valueFrom:
secretKeyRef:
name: openai
key: API_KEY
backend: ollama
model: llama3.1:8b
apiURL: http://localhost:11434
prompt: '{{.params.prompt}}'
changes:
since: 2h
@@ -19,17 +19,24 @@ spec:
actions:
- name: query
ai:
backend: ollama
model: llama3.1:8b
apiURL: http://localhost:11434
prompt: "{{.params.prompt}}"
backend: anthropic
model: claude-3-5-sonnet-latest
apiKey:
valueFrom:
secretKeyRef:
name: anthropic
key: API_KEY
prompt: '{{.params.prompt}}'
changes:
since: 2h
since: 2d
analysis:
since: 2h
since: 2d
relationships:
- depth: 3
direction: outgoing
changes:
since: 12h
- depth: 5
direction: incoming

changes:
since: 12h