From 2e700fb86794488173e6c4d2afe02ea3957b3394 Mon Sep 17 00:00:00 2001
From: Brasse
Date: Sat, 31 Jan 2026 18:49:59 +0100
Subject: [PATCH] Implement clipboard management, logging, and AI task
 handling with hotkey support; fix settings window URL

---
 src-tauri/src/controllers/app_state.rs   |  16 +-
 src-tauri/src/controllers/logs.rs        |  35 +++
 src-tauri/src/controllers/manual_test.rs |   8 +
 src-tauri/src/controllers/mod.rs         |   3 +
 src-tauri/src/controllers/ollama.rs      |  59 ++++++
 src-tauri/src/main.rs                    |  70 +++++-
 src-tauri/src/utilities/ai_handler.rs    | 259 +++++++++++++++++++++++
 src-tauri/src/utilities/config.rs        |  80 +++++++
 src-tauri/src/utilities/mod.rs           |   1 +
 src-tauri/src/viewers/tray.rs            |  58 ++++-
 src-tauri/tauri.conf.json                |   2 +-
 11 files changed, 587 insertions(+), 4 deletions(-)
 create mode 100644 src-tauri/src/controllers/logs.rs
 create mode 100644 src-tauri/src/controllers/manual_test.rs
 create mode 100644 src-tauri/src/controllers/ollama.rs
 create mode 100644 src-tauri/src/utilities/ai_handler.rs

diff --git a/src-tauri/src/controllers/app_state.rs b/src-tauri/src/controllers/app_state.rs
index 8183624..f46b4e7 100644
--- a/src-tauri/src/controllers/app_state.rs
+++ b/src-tauri/src/controllers/app_state.rs
@@ -1,15 +1,29 @@
-use std::sync::Mutex;
+use std::sync::{Mutex, Arc};
+use std::sync::atomic::AtomicBool;
 use tauri::{App, Manager, Runtime};
+use arboard::Clipboard;
 
 pub struct AppState {
     #[allow(dead_code)]
     pub ollama_ready: Mutex<bool>,
+    pub clipboard: Mutex<Option<Clipboard>>,
+    pub should_stop: Arc<AtomicBool>,
 }
 
 impl AppState {
     pub fn new() -> Self {
+        let clipboard = match Clipboard::new() {
+            Ok(cb) => Some(cb),
+            Err(e) => {
+                eprintln!("Failed to initialize global clipboard: {}", e);
+                None
+            }
+        };
+
         Self {
             ollama_ready: Mutex::new(false),
+            clipboard: Mutex::new(clipboard),
+            should_stop: Arc::new(AtomicBool::new(false)),
         }
     }
 }
diff --git a/src-tauri/src/controllers/logs.rs b/src-tauri/src/controllers/logs.rs
new file mode 100644
index 0000000..a708a2f
--- /dev/null
+++ b/src-tauri/src/controllers/logs.rs
@@ -0,0 +1,35 @@
+use tauri::command;
+use crate::utilities::config::load_config;
+use std::fs;
+use std::path::Path;
+
+#[command]
+pub fn get_log_content() -> Result<String, String> {
+    let config = load_config().map_err(|e| e.to_string())?;
+
+    // Path resolution is duplicated from the logging setup; ideally it would be
+    // shared via state. For now, we re-resolve it here.
+    let exe_path = std::env::current_exe().map_err(|e| e.to_string())?;
+    let exe_dir = exe_path.parent().ok_or("Could not find exe directory")?;
+
+    let log_path_config = Path::new(&config.logging.path);
+    let log_dir = if log_path_config.is_absolute() {
+        log_path_config.to_path_buf()
+    } else {
+        exe_dir.join(log_path_config)
+    };
+
+    if !log_dir.exists() {
+        return Ok("No log directory found.".to_string());
+    }
+
+    // Find the latest log file (assuming today's date)
+    let file_name = chrono::Local::now().format("%Y-%m-%d.log").to_string();
+    let log_path = log_dir.join(file_name);
+
+    if log_path.exists() {
+        fs::read_to_string(log_path).map_err(|e| e.to_string())
+    } else {
+        Ok("No log file for today yet.".to_string())
+    }
+}
diff --git a/src-tauri/src/controllers/manual_test.rs b/src-tauri/src/controllers/manual_test.rs
new file mode 100644
index 0000000..8565e08
--- /dev/null
+++ b/src-tauri/src/controllers/manual_test.rs
@@ -0,0 +1,8 @@
+use tauri::{command, AppHandle};
+use crate::utilities::ai_handler::process_ai_task_with_text;
+
+#[command]
+pub async fn run_manual_test(app: AppHandle, task_type: String, text: String) -> Result<(), String> {
+    process_ai_task_with_text(app, task_type, text).await;
+    Ok(())
+}
diff --git a/src-tauri/src/controllers/mod.rs b/src-tauri/src/controllers/mod.rs
index 77efb6f..88d5c12 100644
--- a/src-tauri/src/controllers/mod.rs
+++ b/src-tauri/src/controllers/mod.rs
@@ -1,3 +1,6 @@
 pub mod app_state;
 pub mod settings;
 pub mod greet;
+pub mod ollama;
+pub mod logs;
+pub mod manual_test;
diff --git a/src-tauri/src/controllers/ollama.rs b/src-tauri/src/controllers/ollama.rs
new file mode 100644
index 0000000..f65ce90
--- /dev/null
+++ b/src-tauri/src/controllers/ollama.rs
@@ -0,0 +1,59 @@
+use tauri::command;
+use reqwest::Client;
+use serde::{Deserialize, Serialize};
+use log::{info, error};
+
+#[derive(Serialize, Deserialize)]
+pub struct OllamaModel {
+    pub name: String,
+    // other fields ignored
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct OllamaTagsResponse {
+    pub models: Vec<OllamaModel>,
+}
+
+#[command]
+pub async fn test_ollama_connection(url: String) -> Result<String, String> {
+    info!("Testing Ollama connection to {}", url);
+    let client = Client::new();
+    let res = client.get(&url).send().await.map_err(|e| {
+        error!("Connection test failed: {}", e);
+        e.to_string()
+    })?;
+
+    if res.status().is_success() {
+        let text = res.text().await.map_err(|e| e.to_string())?;
+        info!("Connection OK");
+        Ok(text)
+    } else {
+        error!("Connection test failed with status: {}", res.status());
+        Err(format!("Status: {}", res.status()))
+    }
+}
+
+#[command]
+pub async fn fetch_ollama_models(url: String) -> Result<Vec<String>, String> {
+    let client = Client::new();
+    let tags_url = format!("{}/api/tags", url.trim_end_matches('/'));
+    info!("Fetching models from {}", tags_url);
+
+    let res = client.get(&tags_url).send().await.map_err(|e| {
+        error!("Fetch models failed: {}", e);
+        e.to_string()
+    })?;
+
+    if res.status().is_success() {
+        let parsed: OllamaTagsResponse = res.json().await.map_err(|e| {
+            error!("Fetch models parse error: {}", e);
+            e.to_string()
+        })?;
+        let names = parsed.models.into_iter().map(|m| m.name).collect();
+        info!("Fetched models successfully");
+        Ok(names)
+    } else {
+        error!("Fetch models failed with status: {}", res.status());
+        Err(format!("Failed to fetch models: {}", res.status()))
+    }
+}
diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs
index 3ea37d2..9d26f41 100644
--- a/src-tauri/src/main.rs
+++ b/src-tauri/src/main.rs
@@ -8,10 +8,15 @@ mod viewers;
 use log::info;
 use utilities::config::load_config;
 use utilities::logging::setup_logging;
+use utilities::ai_handler::{parse_hotkey, handle_ai_task};
 use controllers::app_state::init_state;
 use controllers::settings::{get_settings, save_settings};
 use controllers::greet::greet;
+use controllers::ollama::{test_ollama_connection, fetch_ollama_models};
+use controllers::logs::get_log_content;
+use controllers::manual_test::run_manual_test;
 use viewers::tray::setup_tray;
+use global_hotkey::{GlobalHotKeyManager, GlobalHotKeyEvent};
 
 #[tokio::main]
 async fn main() {
@@ -34,7 +39,15 @@ async fn main() {
     info!("Application started");
 
     tauri::Builder::default()
-        .invoke_handler(tauri::generate_handler![get_settings, save_settings, greet])
+        .invoke_handler(tauri::generate_handler![
+            get_settings,
+            save_settings,
+            greet,
+            test_ollama_connection,
+            fetch_ollama_models,
+            get_log_content,
+            run_manual_test
+        ])
         .setup(|app| {
             info!("Setting up application...");
 
@@ -44,6 +57,61 @@ async fn main() {
             // 4. Setup Tray
             setup_tray(app)?;
 
+            // 5. Setup Hotkeys
+            let config = load_config().unwrap_or_default();
+            // Initialize manager and leak it to keep it alive for the session
+            let manager = GlobalHotKeyManager::new().expect("Failed to init GlobalHotKeyManager");
+
+            use log::info;
+            use std::sync::atomic::Ordering;
+            use controllers::app_state::AppState;
+            use tauri::Manager;
+
+            // Map registered hotkey IDs to their configured actions
+            let mut key_map = std::collections::HashMap::new();
+
+            if let Some(k) = parse_hotkey(&config.keybindings.speling) {
+                if let Ok(_) = manager.register(k) { key_map.insert(k.id(), "speling".to_string()); }
+            }
+            if let Some(k) = parse_hotkey(&config.keybindings.summarize) {
+                if let Ok(_) = manager.register(k) { key_map.insert(k.id(), "summarize".to_string()); }
+            }
+            if let Some(k) = parse_hotkey(&config.keybindings.translate) {
+                if let Ok(_) = manager.register(k) { key_map.insert(k.id(), "translate".to_string()); }
+            }
+            if let Some(k) = parse_hotkey(&config.keybindings.stop) {
+                if let Ok(_) = manager.register(k) { key_map.insert(k.id(), "stop".to_string()); }
+            }
+
+            // Keep manager alive
+            Box::leak(Box::new(manager));
+
+            let app_handle = app.handle().clone();
+
+            // Spawn hotkey listener thread
+            std::thread::spawn(move || {
+                let receiver = GlobalHotKeyEvent::receiver();
+                loop {
+                    if let Ok(event) = receiver.recv() {
+                        if event.state == global_hotkey::HotKeyState::Released {
+                            if let Some(action) = key_map.get(&event.id) {
+                                if action == "stop" {
+                                    let state = app_handle.state::<AppState>();
+                                    state.should_stop.store(true, Ordering::Relaxed);
+                                    info!("Stop signal received by hotkey.");
+                                } else {
+                                    let action_clone = action.clone();
+                                    let app_handle_clone = app_handle.clone();
+                                    tauri::async_runtime::spawn(async move {
+                                        handle_ai_task(app_handle_clone, action_clone).await;
+                                    });
+                                }
+                            }
+                        }
+                    }
+                }
+            });
+
             Ok(())
         })
         .on_window_event(|window, event| {
diff --git a/src-tauri/src/utilities/ai_handler.rs b/src-tauri/src/utilities/ai_handler.rs
new file mode 100644
index 0000000..4cb39be
--- /dev/null
+++ b/src-tauri/src/utilities/ai_handler.rs
@@ -0,0 +1,259 @@
+use global_hotkey::hotkey::{HotKey, Modifiers, Code};
+use crate::utilities::config::{load_config, save_config};
+use reqwest::Client;
+use serde_json::json;
+use log::{info, error, debug, warn};
+use tauri::{AppHandle, Manager};
+use crate::controllers::app_state::AppState;
+use std::sync::atomic::Ordering;
+use enigo::{Enigo, Settings, Keyboard};
+
+pub fn parse_hotkey(hotkey_str: &str) -> Option<HotKey> {
+    let parts: Vec<&str> = hotkey_str.split('+').map(|s| s.trim()).collect();
+    if parts.is_empty() { return None; }
+
+    let mut mods = Modifiers::empty();
+    let mut code = Code::KeyA;
+    let mut code_found = false;
+
+    for part in parts {
+        match part.to_lowercase().as_str() {
+            "ctrl" | "control" => mods |= Modifiers::CONTROL,
+            "shift" => mods |= Modifiers::SHIFT,
+            "alt" => mods |= Modifiers::ALT,
+            "super" | "cmd" | "meta" => mods |= Modifiers::META,
+            key => {
+                if key.len() == 1 {
+                    let c = key.chars().next().unwrap();
+                    if c.is_alphabetic() {
+                        code = match c.to_ascii_uppercase() {
+                            'A' => Code::KeyA, 'B' => Code::KeyB, 'C' => Code::KeyC, 'D' => Code::KeyD, 'E' => Code::KeyE,
+                            'F' => Code::KeyF, 'G' => Code::KeyG, 'H' => Code::KeyH, 'I' => Code::KeyI, 'J' => Code::KeyJ,
+                            'K' => Code::KeyK, 'L' => Code::KeyL, 'M' => Code::KeyM, 'N' => Code::KeyN, 'O' => Code::KeyO,
+                            'P' => Code::KeyP, 'Q' => Code::KeyQ, 'R' => Code::KeyR, 'S' => Code::KeyS, 'T' => Code::KeyT,
+                            'U' => Code::KeyU, 'V' => Code::KeyV, 'W' => Code::KeyW, 'X' => Code::KeyX, 'Y' => Code::KeyY,
+                            'Z' => Code::KeyZ,
+                            _ => continue
+                        };
+                        code_found = true;
+                    }
+                }
+            }
+        }
+    }
+
+    if code_found {
+        Some(HotKey::new(Some(mods), code))
+    } else {
+        None
+    }
+}
+
+pub async fn handle_ai_task(app: AppHandle, task_type: String) {
+    let text;
+    {
+        let state = app.state::<AppState>();
+        // Reset stop signal at start of task
+        state.should_stop.store(false, Ordering::Relaxed);
+
+        let mut clipboard_guard = state.clipboard.lock().unwrap();
+
+        if let Some(ref mut cb) = *clipboard_guard {
+            text = match cb.get_text() {
+                Ok(t) => t,
+                Err(e) => {
+                    error!("Clipboard Read Error: {}", e);
+                    return;
+                }
+            };
+        } else {
+            error!("Global clipboard not initialized");
+            return;
+        }
+    }
+
+    if text.trim().is_empty() { return; }
+
+    process_ai_task_with_text(app, task_type, text).await;
+}
+
+pub async fn process_ai_task_with_text(app: AppHandle, task_type: String, text: String) {
+    let config = match load_config() {
+        Ok(c) => c,
+        Err(e) => { eprintln!("Config Load Error: {}", e); return; }
+    };
+
+    let prompt_template = match task_type.as_str() {
+        "speling" => config.prompts.speling.clone(),
+        "summarize" => config.prompts.summarize.clone(),
+        "translate" => config.prompts.translate.clone(),
+        _ => return,
+    };
+
+    let full_prompt = format!("{} {}\n\n{}", config.prompts.general_prompt_modefier, prompt_template, text);
+
+    let use_stream = config.ollama.stream;
+    let mimic_typing = config.output.mimic_typing;
+
+    let client = Client::new();
+    let body = json!({
+        "model": config.ollama.model,
+        "prompt": full_prompt,
+        "stream": use_stream
+    });
+
+    let url = format!("{}/api/generate", config.ollama.url.trim_end_matches('/'));
+
+    info!("Sending Ollama request to: {} (Stream: {}, Mimic: {})", url, use_stream, mimic_typing);
+    debug!("Request Body: {}", body);
+
+    let mut res = match client.post(&url).json(&body).send().await {
+        Ok(r) => r,
+        Err(e) => {
+            error!("Ollama Request Failed: {}", e);
+            eprintln!("Ollama Req Error: {}", e);
+            return;
+        }
+    };
+
+    let status = res.status();
+    info!("Ollama Response Status: {}", status);
+
+    if !status.is_success() {
+        let error_text = res.text().await.unwrap_or_default();
+        error!("Ollama API Error: Status {}, Body: {}", status, error_text);
+
+        // Try to extract a cleaner error message from the JSON body if possible
+        let error_msg = if let Ok(json_err) = serde_json::from_str::<serde_json::Value>(&error_text) {
+            json_err.get("error").and_then(|s| s.as_str()).unwrap_or(&error_text).to_string()
+        } else {
+            error_text.clone()
+        };
+
+        // Smart recovery: if the model was not found, try to auto-select an available one
+        if status == reqwest::StatusCode::NOT_FOUND && error_msg.contains("model") && error_msg.contains("not found") {
+            warn!("Model not found. Attempting to discover available models...");
+            let tags_url = format!("{}/api/tags", config.ollama.url.trim_end_matches('/'));
+            if let Ok(tag_res) = client.get(&tags_url).send().await {
+                if tag_res.status().is_success() {
+                    if let Ok(tag_data) = tag_res.json::<serde_json::Value>().await {
+                        if let Some(models) = tag_data.get("models").and_then(|m| m.as_array()) {
+                            if let Some(first_model) = models.first().and_then(|m| m.get("name")).and_then(|n| n.as_str()) {
+                                let new_model = first_model.to_string();
+                                info!("Found available model: {}. Retrying request...", new_model);
+
+                                // Update config
+                                let mut new_config = config.clone();
+                                new_config.ollama.model = new_model.clone();
+                                if let Err(e) = save_config(&new_config) {
+                                    error!("Failed to save new config: {}", e);
+                                }
+
+                                // Retry once with the discovered model (non-streaming)
+                                let new_body = json!({
+                                    "model": new_model,
+                                    "prompt": full_prompt,
+                                    "stream": false
+                                });
+
+                                if let Ok(retry_res) = client.post(&url).json(&new_body).send().await {
+                                    if retry_res.status().is_success() {
+                                        // If the retry works, process the output normally
+                                        let json_resp: serde_json::Value = retry_res.json().await.unwrap_or(json!({}));
+                                        if let Some(response_text) = json_resp.get("response").and_then(|v| v.as_str()) {
+                                            let state = app.state::<AppState>();
+                                            if let Ok(mut clipboard_guard) = state.clipboard.lock() {
+                                                if let Some(ref mut cb) = *clipboard_guard {
+                                                    let _ = cb.set_text(response_text.to_string());
+                                                }
+                                            }
+                                            return;
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        let user_notification = format!("⚠️ AI Error: {}\nCheck Config/Models.", error_msg);
+        let state = app.state::<AppState>();
+        if let Ok(mut clipboard_guard) = state.clipboard.lock() {
+            if let Some(ref mut cb) = *clipboard_guard {
+                let _ = cb.set_text(user_notification);
+            }
+        }
+        return;
+    }
+
+    let state = app.state::<AppState>();
+    let mut enigo_opt = if mimic_typing {
+        match Enigo::new(&Settings::default()) {
+            Ok(e) => Some(e),
+            Err(e) => {
+                error!("Failed to init Enigo: {}", e);
+                None
+            }
+        }
+    } else { None };
+    let mut full_buffer = String::new();
+
+    if use_stream {
+        let mut buffer = String::new();
+        while let Ok(Some(chunk)) = res.chunk().await {
+            if state.should_stop.load(Ordering::Relaxed) {
+                info!("Generation stopped by user.");
+                break;
+            }
+
+            let chunk_str = String::from_utf8_lossy(&chunk);
+            buffer.push_str(&chunk_str);
+
+            while let Some(pos) = buffer.find('\n') {
+                let line = buffer[..pos].to_string();
+                buffer.drain(..pos+1);
+
+                if let Ok(json_obj) = serde_json::from_str::<serde_json::Value>(&line) {
+                    if let Some(token) = json_obj.get("response").and_then(|s| s.as_str()) {
+                        full_buffer.push_str(token);
+                        if let Some(ref mut enigo) = enigo_opt {
+                            let _ = enigo.text(token);
+                        }
+                    }
+                    if json_obj.get("done").and_then(|b| b.as_bool()).unwrap_or(false) {
+                        break;
+                    }
+                }
+            }
+        }
+    } else {
+        match res.json::<serde_json::Value>().await {
+            Ok(json_obj) => {
+                if let Some(response_text) = json_obj.get("response").and_then(|v| v.as_str()) {
+                    full_buffer = response_text.to_string();
+                    if state.should_stop.load(Ordering::Relaxed) { return; }
+
+                    if let Some(ref mut enigo) = enigo_opt {
+                        let _ = enigo.text(&full_buffer);
+                    }
+                }
+            }
+            Err(e) => error!("JSON Parse Error: {}", e),
+        }
+    }
+
+    // Output to the clipboard when not mimicking typing
+    if !mimic_typing && !full_buffer.is_empty() {
+        if let Ok(mut clipboard_guard) = state.clipboard.lock() {
+            if let Some(ref mut cb) = *clipboard_guard {
+                if let Err(e) = cb.set_text(full_buffer.clone()) {
+                    error!("Clipboard Write Error: {}", e);
+                } else {
+                    info!("Copied {} chars to clipboard.", full_buffer.len());
+                }
+            }
+        }
+    }
+}
diff --git a/src-tauri/src/utilities/config.rs b/src-tauri/src/utilities/config.rs
index 649303c..a931d90 100644
--- a/src-tauri/src/utilities/config.rs
+++ b/src-tauri/src/utilities/config.rs
@@ -30,12 +30,88 @@ impl Default for GeneralConfig {
     }
 }
 
+#[derive(Serialize, Deserialize, Clone)]
+pub struct OutputConfig {
+    pub mimic_typing: bool,
+}
+
+impl Default for OutputConfig {
+    fn default() -> Self {
+        Self {
+            mimic_typing: false,
+        }
+    }
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+pub struct OllamaConfig {
+    pub url: String,
+    pub model: String,
+    pub stream: bool,
+}
+
+impl Default for OllamaConfig {
+    fn default() -> Self {
+        Self {
+            url: "http://localhost:11434".to_string(),
+            model: "gemma3".to_string(),
+            stream: false,
+        }
+    }
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+pub struct PromptsConfig {
+    pub general_prompt_modefier: String,
+    pub speling: String,
+    pub summarize: String,
+    pub translate: String,
+}
+
+impl Default for PromptsConfig {
+    fn default() -> Self {
+        Self {
+            general_prompt_modefier: "".to_string(),
+            speling: "Rätta stavningen och grammatiken i följande text:".to_string(),
+            summarize: "Summera följande text:".to_string(),
+            translate: "Översätt följande text till Svenska:".to_string(),
+        }
+    }
+}
+
+#[derive(Serialize, Deserialize, Clone)]
+pub struct KeybindingsConfig {
+    pub speling: String,
+    pub summarize: String,
+    pub translate: String,
+    pub stop: String,
+}
+
+impl Default for KeybindingsConfig {
+    fn default() -> Self {
+        Self {
+            summarize: "Ctrl+Shift+S".to_string(),
+            translate: "Ctrl+Shift+T".to_string(),
+            speling: "Ctrl+Shift+E".to_string(),
+            stop: "Ctrl+Shift+Q".to_string(),
+        }
+    }
+}
+
 #[derive(Serialize, Deserialize, Clone)]
 pub struct AppConfig {
     #[serde(default)]
     pub logging: LogConfig,
     #[serde(default)]
     pub general: GeneralConfig,
+    #[serde(default)]
+    pub output: OutputConfig,
+    #[serde(default)]
+    pub ollama: OllamaConfig,
+    #[serde(default)]
+    pub prompts: PromptsConfig,
+    #[serde(default)]
+    pub keybindings: KeybindingsConfig,
 }
 
 impl Default for AppConfig {
@@ -43,6 +119,10 @@ impl Default for AppConfig {
         Self {
             logging: LogConfig::default(),
             general: GeneralConfig::default(),
+            output: OutputConfig::default(),
+            ollama: OllamaConfig::default(),
+            prompts: PromptsConfig::default(),
+            keybindings: KeybindingsConfig::default(),
         }
     }
 }
diff --git a/src-tauri/src/utilities/mod.rs b/src-tauri/src/utilities/mod.rs
index 8b0e66a..1dafb64 100644
--- a/src-tauri/src/utilities/mod.rs
+++ b/src-tauri/src/utilities/mod.rs
@@ -1,2 +1,3 @@
 pub mod config;
 pub mod logging;
+pub mod ai_handler;
diff --git a/src-tauri/src/viewers/tray.rs b/src-tauri/src/viewers/tray.rs
index 358d4eb..65a2a63 100644
--- a/src-tauri/src/viewers/tray.rs
+++ b/src-tauri/src/viewers/tray.rs
@@ -46,15 +46,63 @@ fn toggle_settings_window(app: &tauri::AppHandle) {
     }
 }
 
+fn toggle_logs_window(app: &tauri::AppHandle) {
+    match app.get_webview_window("logs") {
+        Some(window) => {
+            if let Ok(true) = window.is_visible() {
+                let _ = window.hide();
+            } else {
+                let _ = window.show();
+                let _ = window.set_focus();
+            }
+        }
+        None => {
+            let _ = WebviewWindowBuilder::new(
+                app,
+                "logs",
+                WebviewUrl::App("logs.html".into())
+            )
+            .title("AI Typist Loggar")
+            .inner_size(900.0, 600.0)
+            .build();
+        }
+    }
+}
+
+fn toggle_tester_window(app: &tauri::AppHandle) {
+    match app.get_webview_window("tester") {
+        Some(window) => {
+            if let Ok(true) = window.is_visible() {
+                let _ = window.hide();
+            } else {
+                let _ = window.show();
+                let _ = window.set_focus();
+            }
+        }
+        None => {
+            let _ = WebviewWindowBuilder::new(
+                app,
+                "tester",
+                WebviewUrl::App("tester.html".into())
+            )
+            .title("Testa AI")
+            .inner_size(500.0, 400.0)
+            .build();
+        }
+    }
+}
+
 pub fn setup_tray(app: &mut App) -> Result<(), Box<dyn std::error::Error>> {
     // Settings window is now created via tauri.conf.json to ensure correct init context on Wayland
 
     // Skapa menyalternativ
     let quit_i = MenuItem::with_id(app, "quit", "Avsluta", true, None::<&str>)?;
     let settings_i = MenuItem::with_id(app, "settings", "Inställningar", true, None::<&str>)?;
+    let logs_i = MenuItem::with_id(app, "logs", "Visa Loggar", true, None::<&str>)?;
"Visa Loggar", true, None::<&str>)?; + let tester_i = MenuItem::with_id(app, "tester", "Testa AI", true, None::<&str>)?; // Skapa menyn - let menu = Menu::with_items(app, &[&settings_i, &quit_i])?; + let menu = Menu::with_items(app, &[&settings_i, &logs_i, &tester_i, &quit_i])?; info!("Tray menu created"); @@ -74,6 +122,14 @@ pub fn setup_tray(app: &mut App) -> Result<(), Box { + info!("User clicked logs from tray"); + toggle_logs_window(app); + } + "tester" => { + info!("User clicked tester from tray"); + toggle_tester_window(app); + } _ => {} } }) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 053cb5a..01f7939 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -13,7 +13,7 @@ { "label": "settings", "title": "AI Typist Inställningar", - "url": "index.html", + "url": "settings.html", "width": 800, "height": 600, "visible": true