feat: playbook AI Action #1693

Open
wants to merge 12 commits into base: main
5 changes: 4 additions & 1 deletion Makefile
@@ -93,7 +93,10 @@ gen-schemas:
cd hack/generate-schemas && \
go mod edit -module=github.com/flanksource/incident-commander/hack/generate-schemas && \
go mod edit -require=github.com/flanksource/[email protected] && \
go mod edit -replace=github.com/flanksource/incident-commander=../../ && \
go mod edit -replace=github.com/flanksource/incident-commander=../../ && \
if grep -v "^//" ../../go.mod | grep -q "replace.*github.com/flanksource/duty.*=>"; then \
go mod edit -replace=github.com/flanksource/duty=../../../duty; \
fi && \
go mod tidy && \
go run ./main.go

8 changes: 8 additions & 0 deletions api/global.go
@@ -26,3 +26,11 @@ var (
const (
PropertyIncidentsDisabled = "incidents.disable"
)

type LLMBackend string

const (
LLMBackendAnthropic LLMBackend = "anthropic"
LLMBackendOpenAI LLMBackend = "openai"
LLMBackendOllama LLMBackend = "ollama"
)
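For illustration only (not part of this diff), a hypothetical helper shows how a raw string could be mapped onto these constants; it assumes the standard fmt package:

func ParseLLMBackend(s string) (LLMBackend, error) {
	switch b := LLMBackend(s); b {
	case LLMBackendAnthropic, LLMBackendOpenAI, LLMBackendOllama:
		return b, nil
	default:
		return "", fmt.Errorf("unsupported LLM backend %q", s)
	}
}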
23 changes: 22 additions & 1 deletion api/v1/connection_types.go
@@ -238,15 +238,36 @@ type ConnectionSMB struct {
Share string `json:"share"`
}

type ConnectionOpenAI struct {
Model *string `json:"model,omitempty"`
BaseURL *types.EnvVar `json:"url,omitempty"`
ApiKey types.EnvVar `json:"apiKey"`
}

type ConnectionOllama struct {
Model *string `json:"model,omitempty"`
BaseURL types.EnvVar `json:"url,omitempty"`
ApiKey types.EnvVar `json:"apiKey,omitempty"`
}

type ConnectionAnthropic struct {
Model *string `json:"model,omitempty"`
BaseURL *types.EnvVar `json:"url,omitempty"`
ApiKey types.EnvVar `json:"apiKey"`
}
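As a rough sketch (all values are illustrative), any of these blocks can be populated directly with a static key, since types.EnvVar exposes a ValueStatic field (used later in this PR) and samber/lo provides ToPtr for the *string fields:

anthropic := ConnectionAnthropic{
	Model:   lo.ToPtr("claude-3-5-sonnet-latest"),
	BaseURL: &types.EnvVar{ValueStatic: "https://api.anthropic.com"}, // illustrative endpoint
	ApiKey:  types.EnvVar{ValueStatic: "sk-ant-example"},             // illustrative key
}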

// ConnectionSpec defines the desired state of Connection
type ConnectionSpec struct {
Properties types.JSONStringMap `json:"properties,omitempty"`

Anthropic *ConnectionAnthropic `json:"anthropic,omitempty"`
AWS *ConnectionAWS `json:"aws,omitempty"`
S3 *ConnectionAWSS3 `json:"s3,omitempty"`
Azure *ConnectionAzure `json:"azure,omitempty"`
AzureDevops *ConnectionAzureDevops `json:"azureDevops,omitempty"`
GCP *ConnectionGCP `json:"gcp,omitempty"`
Ollama *ConnectionOllama `json:"ollama,omitempty"`
OpenAI *ConnectionOpenAI `json:"openai,omitempty"`
S3 *ConnectionAWSS3 `json:"s3,omitempty"`

Folder *ConnectionFolder `json:"folder,omitempty"`
Git *ConnectionGit `json:"git,omitempty"`
158 changes: 158 additions & 0 deletions api/v1/playbook_actions.go
@@ -9,9 +9,15 @@ import (
"github.com/flanksource/commons/duration"
"github.com/flanksource/commons/utils"
"github.com/flanksource/duty/connection"
"github.com/flanksource/duty/context"
"github.com/flanksource/duty/models"
"github.com/flanksource/duty/query"
"github.com/flanksource/duty/types"
"github.com/google/uuid"
"github.com/samber/lo"
"k8s.io/client-go/kubernetes"

"github.com/flanksource/incident-commander/api"
)

type NotificationAction struct {
Expand Down Expand Up @@ -235,6 +241,157 @@ func (git GitCheckout) GetCertificate() types.EnvVar {
return utils.Deref(git.Certificate)
}

type TimeMetadata struct {
Since string `json:"since" yaml:"since"`
}

type AIActionRelationship struct {
// max depth to traverse the relationship. Defaults to 3
Depth *int `json:"depth,omitempty"`

// use incoming/outgoing/all relationships.
Direction query.RelationDirection `json:"direction,omitempty"`

Changes TimeMetadata `json:"changes,omitempty"`
Analysis TimeMetadata `json:"analysis,omitempty"`
}

func (t AIActionRelationship) ToRelationshipQuery(configID uuid.UUID) query.RelationQuery {
q := query.RelationQuery{
ID: configID,
MaxDepth: t.Depth,
Relation: t.Direction,
}

if q.MaxDepth == nil {
q.MaxDepth = lo.ToPtr(3)
}

if q.Relation == "" {
q.Relation = query.All
}

return q
}
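A quick sketch of the defaulting behaviour when neither depth nor direction is set:

rel := AIActionRelationship{}            // no depth or direction specified
q := rel.ToRelationshipQuery(uuid.New()) // uuid value is illustrative
// *q.MaxDepth == 3 and q.Relation == query.All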

type AIActionClient struct {
// Connection reference used to set up the LLM backend
Connection *string `json:"connection,omitempty"`

// API Key
APIKey types.EnvVar `json:"apiKey,omitempty"`

// Optionally specify the LLM backend.
// Supported: anthropic (default), ollama, openai.
Backend api.LLMBackend `json:"backend,omitempty"`

// Model name based on the backend chosen.
// Example: gpt-4o for OpenAI, claude-3-5-sonnet-latest for Anthropic, llama3.1:8b for Ollama
Model string `json:"model,omitempty"`

// BaseURL or API URL.
// Example: the server URL for Ollama, or a custom URL for Anthropic when going through a proxy
APIURL string `json:"apiURL,omitempty"`
}

func (t *AIActionClient) Populate(ctx context.Context) error {
if t.Connection != nil {
conn, err := ctx.HydrateConnectionByURL(*t.Connection)
if err != nil {
return err
} else if conn == nil {
return fmt.Errorf("connection(%s) was not found: %w", *t.Connection, err)
}

if err := t.APIKey.Scan(conn.Password); err != nil {
return err
}

t.APIURL = conn.URL

if m, ok := conn.Properties["model"]; ok {
t.Model = m
}

switch conn.Type {
case models.ConnectionTypeOllama:
t.Backend = api.LLMBackendOllama
case models.ConnectionTypeAnthropic:
t.Backend = api.LLMBackendAnthropic
case models.ConnectionTypeOpenAI:
t.Backend = api.LLMBackendOpenAI
default:
return fmt.Errorf("connection of type %q is not supported. Supported types: %s, %s & %s",
conn.Type, models.ConnectionTypeOllama, models.ConnectionTypeAnthropic, models.ConnectionTypeOpenAI,
)
}
}

if !t.APIKey.IsEmpty() {
if v, err := ctx.GetEnvValueFromCache(t.APIKey, ctx.GetNamespace()); err != nil {
return fmt.Errorf("failed to get api key from source ref : %w", err)
} else {
t.APIKey.ValueStatic = v
}
}

return nil
}
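A minimal usage sketch, assuming a connection reference in whatever form HydrateConnectionByURL accepts (the string below is illustrative):

client := AIActionClient{Connection: lo.ToPtr("connection://default/anthropic")}
if err := client.Populate(ctx); err != nil {
	return err
}
// Backend, Model, APIURL and APIKey.ValueStatic are now resolved from the connection.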

type AIActionContext struct {
// The config id to operate on.
// If not provided, the playbook's config is used.
Config string `json:"config,omitempty" yaml:"config,omitempty" template:"true"`

// Select changes for the config to provide as an additional context to the AI model.
Changes TimeMetadata `json:"changes,omitempty" yaml:"changes,omitempty"`

// Select analysis for the config to provide as an additional context to the AI model.
Analysis TimeMetadata `json:"analysis,omitempty" yaml:"analysis,omitempty"`

// Select related configs to provide as an additional context to the AI model.
Relationships []AIActionRelationship `json:"relationships,omitempty" yaml:"relationships,omitempty"`
}

func (t AIActionContext) ShouldFetchConfigChanges() bool {
// if changes are being fetched from relationships, we don't have to query
// the changes for just the config alone.

if t.Changes.Since == "" {
return false
}

for _, r := range t.Relationships {
if r.Changes.Since != "" {
return false
}
}

return true
}
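To illustrate the short-circuit: as soon as any relationship selects its own changes, the config-level change query is skipped.

spec := AIActionContext{
	Changes:       TimeMetadata{Since: "24h"},
	Relationships: []AIActionRelationship{{Changes: TimeMetadata{Since: "7d"}}},
}
_ = spec.ShouldFetchConfigChanges() // false: relationship-level changes already cover the config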

type AIAction struct {
AIActionClient `json:",inline" yaml:",inline"`
AIActionContext `json:",inline" yaml:",inline" template:"true"`

// Use an AI agent that can autonomously drive the diagnosis using tools that interface directly with the database.
// NOTE: Not exposed for now
UseAgent bool `json:"-"`

// SystemPrompt provides context, instructions, and guidelines to the LLM before presenting it
// with a question or task.
// It sets the stage for the conversation, specifying the LLM's role, personality,
// tone, or any other relevant information that helps it better understand and respond to the user's input.
SystemPrompt string `json:"systemPrompt"`

// Prompt is the human prompt
Prompt string `json:"prompt" template:"true"`

// Output format of the prompt.
// Supported: markdown (default), slack.
Formats []string `json:"formats,omitempty"`
}
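Putting it together, a hedged sketch of an AIAction value as it might be assembled in Go (all field values are illustrative, including the template expression):

action := AIAction{
	AIActionClient: AIActionClient{
		Backend: api.LLMBackendAnthropic,
		Model:   "claude-3-5-sonnet-latest",
		APIKey:  types.EnvVar{ValueStatic: "sk-ant-example"}, // illustrative only
	},
	AIActionContext: AIActionContext{
		Config:  "{{ .config.id }}",
		Changes: TimeMetadata{Since: "2d"},
	},
	SystemPrompt: "You are a Kubernetes diagnostics assistant.",
	Prompt:       "Why is this deployment unhealthy?",
	Formats:      []string{"markdown", "slack"},
}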

type ExecAction struct {
// Script can be an inline script or a path to a script that needs to be executed
// On windows executed via powershell and in darwin and linux executed using bash
@@ -405,6 +562,7 @@ type PlaybookAction struct {
// When left empty, the templating is done on the main instance(host) itself.
TemplatesOn string `json:"templatesOn,omitempty" yaml:"templatesOn,omitempty"`

AI *AIAction `json:"ai,omitempty" yaml:"ai,omitempty" template:"true"`
Exec *ExecAction `json:"exec,omitempty" yaml:"exec,omitempty" template:"true"`
GitOps *GitOpsAction `json:"gitops,omitempty" yaml:"gitops,omitempty" template:"true"`
Github *GithubAction `json:"github,omitempty" yaml:"github,omitempty" template:"true"`