use global_hotkey::hotkey::{HotKey, Modifiers, Code};
use crate::utilities::config::{load_config, save_config}; // Import save_config
use reqwest::Client;
use serde_json::json;
use log::{info, error, debug, warn}; // Import warn
use tauri::{AppHandle, Manager};
use crate::controllers::app_state::AppState;
use std::sync::atomic::Ordering;
use enigo::{Enigo, Settings, Keyboard};

pub fn parse_hotkey(hotkey_str: &str) -> Option<HotKey> {
    let parts: Vec<&str> = hotkey_str.split('+').map(|s| s.trim()).collect();
    if parts.is_empty() {
        return None;
    }

    let mut mods = Modifiers::empty();
    let mut code = Code::KeyA;
    let mut code_found = false;

    for part in parts {
        match part.to_lowercase().as_str() {
            "ctrl" | "control" => mods |= Modifiers::CONTROL,
            "shift" => mods |= Modifiers::SHIFT,
            "alt" => mods |= Modifiers::ALT,
            "super" | "cmd" | "meta" => mods |= Modifiers::META,
            key => {
                // Only a single alphabetic character is accepted as the main key
                if key.len() == 1 {
                    let c = key.chars().next().unwrap();
                    if c.is_alphabetic() {
                        code = match c.to_ascii_uppercase() {
                            'A' => Code::KeyA, 'B' => Code::KeyB, 'C' => Code::KeyC, 'D' => Code::KeyD,
                            'E' => Code::KeyE, 'F' => Code::KeyF, 'G' => Code::KeyG, 'H' => Code::KeyH,
                            'I' => Code::KeyI, 'J' => Code::KeyJ, 'K' => Code::KeyK, 'L' => Code::KeyL,
                            'M' => Code::KeyM, 'N' => Code::KeyN, 'O' => Code::KeyO, 'P' => Code::KeyP,
                            'Q' => Code::KeyQ, 'R' => Code::KeyR, 'S' => Code::KeyS, 'T' => Code::KeyT,
                            'U' => Code::KeyU, 'V' => Code::KeyV, 'W' => Code::KeyW, 'X' => Code::KeyX,
                            'Y' => Code::KeyY, 'Z' => Code::KeyZ,
                            _ => continue,
                        };
                        code_found = true;
                    }
                }
            }
        }
    }

    if code_found {
        Some(HotKey::new(Some(mods), code))
    } else {
        None
    }
}

pub async fn handle_ai_task(app: AppHandle, task_type: String) {
    let text;
    {
        let state = app.state::<AppState>();
        // Reset stop signal at the start of the task
        state.should_stop.store(false, Ordering::Relaxed);

        let mut clipboard_guard = state.clipboard.lock().unwrap();
        if let Some(ref mut cb) = *clipboard_guard {
            text = match cb.get_text() {
                Ok(t) => t,
                Err(e) => {
                    error!("Clipboard Read Error: {}", e);
                    return;
                }
            };
        } else {
            error!("Global clipboard not initialized");
            return;
        }
    }

    if text.trim().is_empty() {
        return;
    }

    process_ai_task_with_text(app, task_type, text).await;
}

pub async fn process_ai_task_with_text(app: AppHandle, task_type: String, text: String) {
    let config = match load_config() {
        Ok(c) => c,
        Err(e) => {
            eprintln!("Config Load Error: {}", e);
            return;
        }
    };

    let prompt_template = match task_type.as_str() {
        "speling" => config.prompts.speling.clone(),
        "summarize" => config.prompts.summarize.clone(),
        "translate" => config.prompts.translate.clone(),
        _ => return,
    };

    let full_prompt = format!(
        "{} {}\n\n{}",
        config.prompts.general_prompt_modefier, prompt_template, text
    );
    let use_stream = config.ollama.stream;
    let mimic_typing = config.output.mimic_typing;

    let client = Client::new();
    let body = json!({
        "model": config.ollama.model,
        "prompt": full_prompt,
        "stream": use_stream
    });
    let url = format!("{}/api/generate", config.ollama.url.trim_end_matches('/'));

    info!(
        "Sending Ollama request to: {} (Stream: {}, Mimic: {})",
        url, use_stream, mimic_typing
    );
    debug!("Request Body: {}", body);

    let mut res = match client.post(&url).json(&body).send().await {
        Ok(r) => r,
        Err(e) => {
            error!("Ollama Request Failed: {}", e);
            eprintln!("Ollama Req Error: {}", e);
            return;
        }
    };

    let status = res.status();
    info!("Ollama Response Status: {}", status);

    if !status.is_success() {
        let error_text = res.text().await.unwrap_or_default();
        error!("Ollama API Error: Status {}, Body: {}", status, error_text);

        // Helper to extract a cleaner error message from the JSON body, if possible
        let error_msg = if let Ok(json_err) = serde_json::from_str::<serde_json::Value>(&error_text) {
            json_err
                .get("error")
                .and_then(|s| s.as_str())
                .unwrap_or(&error_text)
                .to_string()
        } else {
            error_text.clone()
        };

        // SMART RECOVERY: if the configured model was not found, try to auto-fix by
        // discovering an available model and retrying once.
        if status == reqwest::StatusCode::NOT_FOUND
            && error_msg.contains("model")
            && error_msg.contains("not found")
        {
            warn!("Model not found. Attempting to discover available models...");
            let tags_url = format!("{}/api/tags", config.ollama.url.trim_end_matches('/'));
            if let Ok(tag_res) = client.get(&tags_url).send().await {
                if tag_res.status().is_success() {
                    if let Ok(tag_data) = tag_res.json::<serde_json::Value>().await {
                        if let Some(models) = tag_data.get("models").and_then(|m| m.as_array()) {
                            if let Some(first_model) = models
                                .first()
                                .and_then(|m| m.get("name"))
                                .and_then(|n| n.as_str())
                            {
                                let new_model = first_model.to_string();
                                info!("Found available model: {}. Retrying request...", new_model);

                                // Update config
                                let mut new_config = config.clone();
                                new_config.ollama.model = new_model.clone();
                                if let Err(e) = save_config(&new_config) {
                                    error!("Failed to save new config: {}", e);
                                }

                                // Retry logic
                                let new_body = json!({
                                    "model": new_model,
                                    "prompt": full_prompt,
                                    "stream": false
                                });
                                if let Ok(retry_res) = client.post(&url).json(&new_body).send().await {
                                    if retry_res.status().is_success() {
                                        // If the retry works, process the output normally
                                        let json_resp: serde_json::Value =
                                            retry_res.json().await.unwrap_or(json!({}));
                                        if let Some(response_text) =
                                            json_resp.get("response").and_then(|v| v.as_str())
                                        {
                                            let state = app.state::<AppState>();
                                            if let Ok(mut clipboard_guard) = state.clipboard.lock() {
                                                if let Some(ref mut cb) = *clipboard_guard {
                                                    let _ = cb.set_text(response_text.to_string());
                                                }
                                            }
                                            return;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        let user_notification = format!("⚠️ AI Error: {}\nCheck Config/Models.", error_msg);
        let state = app.state::<AppState>();
        if let Ok(mut clipboard_guard) = state.clipboard.lock() {
            if let Some(ref mut cb) = *clipboard_guard {
                let _ = cb.set_text(user_notification);
            }
        }
        return;
    }

    let state = app.state::<AppState>();

    let mut enigo_opt = if mimic_typing {
        match Enigo::new(&Settings::default()) {
            Ok(e) => Some(e),
            Err(e) => {
                error!("Failed to init Enigo: {}", e);
                None
            }
        }
    } else {
        None
    };

    let mut full_buffer = String::new();

    if use_stream {
        // Streaming responses arrive as newline-delimited JSON objects; accumulate
        // raw chunks and parse one complete line at a time.
        let mut buffer = String::new();
        while let Ok(Some(chunk)) = res.chunk().await {
            if state.should_stop.load(Ordering::Relaxed) {
                info!("Detailed: Generation stopped by user.");
                break;
            }

            let chunk_str = String::from_utf8_lossy(&chunk);
            buffer.push_str(&chunk_str);

            while let Some(pos) = buffer.find('\n') {
                let line = buffer[..pos].to_string();
                buffer.drain(..pos + 1);

                if let Ok(json_obj) = serde_json::from_str::<serde_json::Value>(&line) {
                    if let Some(token) = json_obj.get("response").and_then(|s| s.as_str()) {
                        full_buffer.push_str(token);
                        if let Some(ref mut enigo) = enigo_opt {
                            let _ = enigo.text(token);
                        }
                    }
                    if json_obj.get("done").and_then(|b| b.as_bool()).unwrap_or(false) {
                        break;
                    }
                }
            }
        }
    } else {
        match res.json::<serde_json::Value>().await {
            Ok(json_obj) => {
                if let Some(response_text) = json_obj.get("response").and_then(|v| v.as_str()) {
                    full_buffer = response_text.to_string();
                    if state.should_stop.load(Ordering::Relaxed) {
                        return;
                    }
                    if let Some(ref mut enigo) = enigo_opt {
                        let _ = enigo.text(&full_buffer);
                    }
                }
            }
            Err(e) => error!("JSON Parse Error: {}", e),
        }
    }

    // Output to clipboard if NOT mimic typing
    if !mimic_typing && !full_buffer.is_empty() {
        if let Ok(mut clipboard_guard) = state.clipboard.lock() {
            if let Some(ref mut cb) = *clipboard_guard {
                // Remember the output length before the buffer is moved into the clipboard
                let copied_len = full_buffer.len();
                if let Err(e) = cb.set_text(full_buffer) {
{ error!("Clipboard Write Error: {}", e); } else { info!("Copied {} chars to clipboard.", text.len()); } } } } }