Skip to content

Commit e871bb4

Browse files
committed
add cache
1 parent 36c55ca commit e871bb4

File tree

1 file changed

+104
-18
lines changed

1 file changed

+104
-18
lines changed

src/main.rs

+104-18
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
mod shell;
22
mod model;
33

4+
use std::collections::HashMap;
45
use std::io::{self, Write};
56
use std::fs;
67
use std::process::Command as ProcessCommand;
@@ -17,7 +18,6 @@ struct Config {
1718
max_tokens: i32
1819
}
1920

20-
2121
fn main() -> Result<(), Box<dyn std::error::Error>> {
2222
let matches = Command::new("llm-term")
2323
.version("1.0")
@@ -32,6 +32,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
3232
.long("config")
3333
.help("Run configuration setup")
3434
.action(clap::ArgAction::SetTrue))
35+
.arg(
36+
Arg::new("disable-cache")
37+
.long("disable-cache")
38+
.help("Disable cache and always query the LLM")
39+
.action(clap::ArgAction::SetTrue),
40+
)
3541
.get_matches();
3642

3743
let config_path = get_default_config_path().expect("Failed to get default config path");
@@ -46,32 +52,46 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
4652

4753
let config = load_or_create_config(&config_path)?;
4854

55+
let cache_path = get_cache_path()?;
56+
let mut cache = load_cache(&cache_path)?;
57+
4958
if let Some(prompt) = matches.get_one::<String>("prompt") {
50-
match &config.model.llm_get_command(&config, prompt.as_str()) {
51-
Ok(Some(command)) => {
52-
println!("{}", &command.cyan().bold());
59+
let disable_cache = matches.get_flag("disable-cache");
60+
61+
if !disable_cache {
62+
if let Some(cached_command) = cache.get(prompt) {
63+
println!("{}", "This command exists in cache".yellow());
64+
println!("{}", cached_command.cyan().bold());
5365
println!("{}", "Do you want to execute this command? (y/n)".yellow());
5466

5567
let mut user_input = String::new();
5668
io::stdin().read_line(&mut user_input)?;
5769

5870
if user_input.trim().to_lowercase() == "y" {
59-
let (shell_cmd, shell_arg) = Shell::detect().to_shell_command_and_command_arg();
60-
61-
match ProcessCommand::new(shell_cmd).arg(shell_arg).arg(&command).output() {
62-
Ok(output) => {
63-
println!("{}", "Command output:".green().bold());
64-
io::stdout().write_all(&output.stdout)?;
65-
io::stderr().write_all(&output.stderr)?;
66-
}
67-
Err(e) => eprintln!("{}", format!("Failed to execute command: {}", e).red()),
68-
}
71+
execute_command(cached_command)?;
6972
} else {
70-
println!("{}", "Command execution cancelled.".yellow());
73+
println!("{}", "Do you want to invalidate the cache? (y/n)".yellow());
74+
user_input.clear();
75+
io::stdin().read_line(&mut user_input)?;
76+
77+
if user_input.trim().to_lowercase() == "y" {
78+
// Invalidate cache
79+
cache.remove(prompt);
80+
save_cache(&cache_path, &cache)?;
81+
// Proceed to get command from LLM
82+
get_command_from_llm(&config, &mut cache, &cache_path, prompt)?;
83+
} else {
84+
println!("{}", "Command execution cancelled.".yellow());
85+
}
7186
}
72-
},
73-
Ok(None) => println!("{}", "No command could be generated.".yellow()),
74-
Err(e) => eprintln!("{}", format!("Error: {}", e).red()),
87+
return Ok(());
88+
} else {
89+
// Not in cache, proceed to get command from LLM
90+
get_command_from_llm(&config, &mut cache, &cache_path, prompt)?;
91+
}
92+
} else {
93+
// Cache is disabled, proceed to get command from LLM
94+
get_command_from_llm(&config, &mut cache, &cache_path, prompt)?;
7595
}
7696
} else {
7797
println!("{}", "Please provide a prompt or use --config to set up the configuration.".yellow());
@@ -129,4 +149,70 @@ fn create_config() -> Result<Config, io::Error> {
129149
model,
130150
max_tokens,
131151
})
152+
}
153+
154+
/// Resolve the on-disk location of the command cache.
///
/// The cache lives in a `cache.json` file placed next to the running
/// executable.
/// NOTE(review): that directory may be read-only for system-wide
/// installs — confirm cache writes are expected to succeed there.
fn get_cache_path() -> Result<PathBuf, Box<dyn std::error::Error>> {
    let binary = std::env::current_exe()?;
    let dir = binary
        .parent()
        .ok_or("Failed to get executable directory")?;
    Ok(dir.join("cache.json"))
}
159+
160+
fn load_cache(path: &PathBuf) -> Result<HashMap<String, String>, Box<dyn std::error::Error>> {
161+
if let Ok(content) = fs::read_to_string(path) {
162+
Ok(serde_json::from_str(&content)?)
163+
} else {
164+
Ok(HashMap::new())
165+
}
166+
}
167+
168+
fn save_cache(path: &PathBuf, cache: &HashMap<String, String>) -> Result<(), Box<dyn std::error::Error>> {
169+
let content = serde_json::to_string_pretty(&cache)?;
170+
fs::write(path, content)?;
171+
Ok(())
172+
}
173+
174+
fn get_command_from_llm(
175+
config: &Config,
176+
cache: &mut HashMap<String, String>,
177+
cache_path: &PathBuf,
178+
prompt: &String,
179+
) -> Result<(), Box<dyn std::error::Error>> {
180+
match &config.model.llm_get_command(config, prompt.as_str()) {
181+
Ok(Some(command)) => {
182+
println!("{}", &command.cyan().bold());
183+
println!("{}", "Do you want to execute this command? (y/n)".yellow());
184+
185+
let mut user_input = String::new();
186+
io::stdin().read_line(&mut user_input)?;
187+
188+
if user_input.trim().to_lowercase() == "y" {
189+
execute_command(&command)?;
190+
} else {
191+
println!("{}", "Command execution cancelled.".yellow());
192+
}
193+
194+
// Save command to cache
195+
cache.insert(prompt.clone(), command.clone());
196+
save_cache(cache_path, cache)?;
197+
},
198+
Ok(None) => println!("{}", "No command could be generated.".yellow()),
199+
Err(e) => eprintln!("{}", format!("Error: {}", e).red()),
200+
}
201+
202+
Ok(())
203+
}
204+
205+
fn execute_command(command: &str) -> Result<(), Box<dyn std::error::Error>> {
206+
let (shell_cmd, shell_arg) = Shell::detect().to_shell_command_and_command_arg();
207+
208+
match ProcessCommand::new(shell_cmd).arg(shell_arg).arg(&command).output() {
209+
Ok(output) => {
210+
println!("{}", "Command output:".green().bold());
211+
io::stdout().write_all(&output.stdout)?;
212+
io::stderr().write_all(&output.stderr)?;
213+
}
214+
Err(e) => eprintln!("{}", format!("Failed to execute command: {}", e).red()),
215+
}
216+
217+
Ok(())
132218
}

0 commit comments

Comments
 (0)