Skip to content

Commit

Permalink
Fix formatting
Browse files — browse the repository at this point in the history
  • Loading branch information
devleejb committed Feb 29, 2024
1 parent dbed00c commit decf88c
Show / hide file tree
Showing 6 changed files with 55 additions and 70 deletions.
2 changes: 1 addition & 1 deletion src/commands/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@ pub async fn handle_command(command: Commands) -> Result<(), Box<dyn std::error:
Commands::Search(search) => search::handle_search(search).await,
Commands::Set(set) => set::handle_set(set).await,
}
}
}
68 changes: 32 additions & 36 deletions src/commands/search/mod.rs
Original file line number Diff line number Diff line change
@@ -1,37 +1,37 @@
use clap::Parser;
use std::env;
use spinners::{Spinner, Spinners};
use cli_clipboard::{ClipboardContext, ClipboardProvider};
use llm_chain::{
chains::conversation::Chain, executor, parameters, prompt, step::Step,
chains::conversation::Chain,
executor,
options::{ModelRef, OptionsBuilder},
prompt::{Conversation, ChatMessageCollection},
parameters, prompt,
prompt::{ChatMessageCollection, Conversation},
step::Step,
};
use llm_chain_openai::chatgpt::Model;
use cli_clipboard::{ClipboardContext, ClipboardProvider};
use spinners::{Spinner, Spinners};
use std::env;
#[derive(Debug, Parser)]
#[clap(
name = "search",
about="Search a command from the LLM model",
)]
#[clap(name = "search", about = "Search a command from the LLM model")]
pub struct Search {
// The command to search
#[clap(help="The command to search")]
qeury: String,
#[clap(help = "The command to search")]
qeury: String,
}

pub fn few_shot_template(list: Vec<(String, String)>) -> ChatMessageCollection<String> {

let mut ret_prompt = Conversation::new();

for (user, assistant) in &list {
ret_prompt = ret_prompt.with_user(user.to_string()).with_assistant(assistant.to_string());
ret_prompt = ret_prompt
.with_user(user.to_string())
.with_assistant(assistant.to_string());
}

ret_prompt
}

pub async fn handle_search(search: Search) -> Result<(), Box<dyn std::error::Error>> {

if !env::var("OPENAI_API_KEY").is_ok() {
println!("Please set your OpenAI API key using the `set key` command.");
return Ok(());
Expand All @@ -45,18 +45,14 @@ pub async fn handle_search(search: Search) -> Result<(), Box<dyn std::error::Err
option_builder.add_option(llm_chain::options::Opt::Model(model));
let options = option_builder.build();

let exec = executor!(
chatgpt,
options
)?;
let exec = executor!(chatgpt, options)?;

let few_shot_examples: Vec<(String, String)> = vec![
("Show all pods in k8s".to_string(), "kubectl get pods".to_string()),
("Find all files recursively within the current directory that contain 'a' in their filenames.".to_string(), "find . -type f -name '*a*' -print".to_string()),
("Provide the command to build and push a Docker image from the current directory.".to_string(), "docker build -t myapp:latest --path".to_string()),
];



let mut conversation = Conversation::new()
.with_system_template(
"I want you to act as generating a command for request tasks on {{os_name}}. Also please don't explain the commands, just generate the command.",
Expand All @@ -67,30 +63,30 @@ pub async fn handle_search(search: Search) -> Result<(), Box<dyn std::error::Err

conversation.append(few_shot_prompt);

let conversation = conversation
.with_system(
"Only generate the command, don't explain it".to_string()
);
let conversation =
conversation.with_system("Only generate the command, don't explain it".to_string());

let mut chain = Chain::new_with_message_collection(
&conversation
);
let mut chain = Chain::new_with_message_collection(&conversation);

let step = Step::for_prompt_template(
prompt!(
user: "task : {{query}}"
)
);
let step = Step::for_prompt_template(prompt!(
user: "task : {{query}}"
));
let parameters = parameters!().with("query", search.qeury);
let res = chain.send_message(step, &parameters, &exec).await?;
let res = res.to_immediate().await?.as_content().to_chat().to_string();
let res = res.split("Assistant: ").collect::<Vec<&str>>()[1].to_string().trim().to_string();
let res = res.split("Assistant: ").collect::<Vec<&str>>()[1]
.to_string()
.trim()
.to_string();

let mut ctx: ClipboardContext = ClipboardProvider::new().unwrap();
ctx.set_contents(res.clone().to_string()).unwrap();

spinner.stop_and_persist("✔", "Finished searching for the command and copied to your clipboard :)".into());

spinner.stop_and_persist(
"✔",
"Finished searching for the command and copied to your clipboard :)".into(),
);

println!("{}", res);
Ok(())
}
}
13 changes: 4 additions & 9 deletions src/commands/set/key/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,14 @@ use std::fs::File;
use std::io::prelude::*;

#[derive(Debug, Parser)]
#[clap(
name = "key",
about="Register for an API key to use the OpenAI API",
)]
#[clap(name = "key", about = "Register for an API key to use the OpenAI API")]
pub struct Key {
// The API key to set
#[clap(help="OpenAI API Key")]
#[clap(help = "OpenAI API Key")]
api_key: String,
}

pub async fn handle_key(key: Key) -> Result<(), Box<dyn std::error::Error>> {

let home_dir = dirs::home_dir().unwrap();
let save_dir = home_dir.join(".cllm");
let config_path = save_dir.join("credentials.json");
Expand All @@ -28,8 +24,7 @@ pub async fn handle_key(key: Key) -> Result<(), Box<dyn std::error::Error>> {
let mut config = if config_path.exists() {
let config = std::fs::read_to_string(config_path.clone())?;
serde_json::from_str(&config)?
}
else {
} else {
serde_json::json!({})
};

Expand All @@ -40,4 +35,4 @@ pub async fn handle_key(key: Key) -> Result<(), Box<dyn std::error::Error>> {

println!("API key set successfully.");
Ok(())
}
}
13 changes: 5 additions & 8 deletions src/commands/set/mod.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,21 @@
mod key;
use clap::{Subcommand, Parser};
use clap::{Parser, Subcommand};

#[derive(Debug, Subcommand)]
#[clap(
name = "set",
about="Configure application resources",
)]
#[clap(name = "set", about = "Configure application resources")]
pub enum Commands {
// Set the API key
Key(key::Key)
Key(key::Key),
}

#[derive(Debug, Parser)]
pub struct Set {
#[clap(subcommand)]
subcmd: Commands
subcmd: Commands,
}

pub async fn handle_set(set: Set) -> Result<(), Box<dyn std::error::Error>> {
match set.subcmd {
Commands::Key(key) => key::handle_key(key).await,
}
}
}
13 changes: 6 additions & 7 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,23 @@
pub(crate) mod commands;

use std::env;
use clap::Parser;
use commands::{Commands, handle_command};
use commands::{handle_command, Commands};
use dirs;
use std::env;

#[derive(Debug, Parser)]
#[clap(
version,
about="Empower your CLI experience with a command search tool driven by LLM magic!\n\
Github: https://github.com/dev-backpack/cllm\n\
If you have any questions or suggestions, feel free to open an issue on the github repo."
about = "Empower your CLI experience with a command search tool driven by LLM magic!\n\
If you have any questions or suggestions, feel free to open an issue on the github repo.\n\
GitHub: https://github.com/dev-backpack/cllm"
)]
struct Cli {
#[clap(subcommand)]
pub commands: Commands,
}

pub async fn run() -> Result<(), Box<dyn std::error::Error>> {

// Set the OPENAI_API_KEY environment variable
let home_dir = dirs::home_dir().unwrap();
let save_dir = home_dir.join(".cllm");
Expand All @@ -41,4 +40,4 @@ pub async fn run() -> Result<(), Box<dyn std::error::Error>> {
}

Ok(())
}
}
16 changes: 7 additions & 9 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
pub mod commands;

use std::env;
use clap::Parser;
use commands::{Commands, handle_command};
use commands::{handle_command, Commands};
use dirs;
use std::env;

#[derive(Debug, Parser)]
#[clap(
version,
about="Empower your CLI experience with a command search tool driven by LLM magic!\n\
Github: https://github.com/dev-backpack/cllm\n\
If you have any questions or suggestions, feel free to open an issue on the github repo."
about = "Empower your CLI experience with a command search tool driven by LLM magic!\n\
If you have any questions or suggestions, feel free to open an issue on the github repo.\n\
GitHub: https://github.com/dev-backpack/cllm"
)]
struct Cli {
#[clap(subcommand)]
Expand All @@ -19,7 +19,6 @@ struct Cli {

#[tokio::main]
async fn main() {

// Set the OPENAI_API_KEY environment variable
let home_dir = dirs::home_dir().unwrap();
let save_dir = home_dir.join(".cllm");
Expand All @@ -38,6 +37,5 @@ async fn main() {
// Parse the command line arguments
let cli = Cli::parse();

if let Err(_error) = handle_command(cli.commands).await {
}
}
if let Err(_error) = handle_command(cli.commands).await {}
}

0 comments on commit decf88c

Please sign in to comment.