commit 3dbfa61c0f689a27585b50932334b08122a6df65
Author: jrosh
Date:   Tue Aug 26 23:15:28 2025 +0200

    init

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..869df07
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/target
+Cargo.lock
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..4fb9363
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "ollama-chat"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+gtk4 = { version = "0.9", features = ["v4_6"] }
+tokio = { version = "1.0", features = ["full"] }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+reqwest = { version = "0.12", features = ["json"] }
+pango = "0.20"
+regex = "1.0"
diff --git a/src/api.rs b/src/api.rs
new file mode 100644
index 0000000..19fafeb
--- /dev/null
+++ b/src/api.rs
@@ -0,0 +1,38 @@
+use std::sync::{Arc, Mutex};
+use crate::types::{ChatMessage, ChatRequest, ChatResponse, ModelInfo, ModelsResponse};
+
+pub async fn fetch_models(base_url: &str) -> Result<Vec<ModelInfo>, Box<dyn std::error::Error>> {
+    let url = format!("{}/api/tags", base_url);
+    let response = reqwest::get(&url).await?;
+    let models_response: ModelsResponse = response.json().await?;
+    Ok(models_response.models)
+}
+
+pub async fn send_chat_request(
+    base_url: &str,
+    model: &str,
+    conversation: &Arc<Mutex<Vec<ChatMessage>>>,
+) -> Result<String, Box<dyn std::error::Error>> {
+    let messages = {
+        let conversation = conversation.lock().unwrap();
+        conversation.iter().cloned().collect::<Vec<_>>()
+    };
+
+    let request = ChatRequest {
+        model: model.to_string(),
+        messages,
+        stream: false,
+    };
+
+    let client = reqwest::Client::new();
+    let url = format!("{}/api/chat", base_url);
+
+    let response = client
+        .post(&url)
+        .json(&request)
+        .send()
+        .await?;
+
+    let chat_response: ChatResponse = response.json().await?;
+    Ok(chat_response.message.content)
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..994e532
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,18 @@
+use gtk4::prelude::*;
+use gtk4::{glib, Application};
+
+mod ui;
+mod api;
+mod types;
+mod state;
+
+use state::AppState;
+
+const APP_ID: &str = "com.example.ollama-chat";
+
+#[tokio::main]
+async fn main() -> glib::ExitCode {
+    let app = Application::builder().application_id(APP_ID).build();
+    app.connect_activate(ui::build_ui);
+    app.run()
+}
diff --git a/src/state.rs b/src/state.rs
new file mode 100644
index 0000000..bc2f647
--- /dev/null
+++ b/src/state.rs
@@ -0,0 +1,17 @@
+use std::sync::{Arc, Mutex};
+use crate::types::ChatMessage;
+
+#[derive(Clone)]
+pub struct AppState {
+    pub conversation: Arc<Mutex<Vec<ChatMessage>>>,
+    pub ollama_url: String,
+}
+
+impl Default for AppState {
+    fn default() -> Self {
+        Self {
+            conversation: Arc::new(Mutex::new(Vec::new())),
+            ollama_url: "http://localhost:11434".to_string(),
+        }
+    }
+}
diff --git a/src/types.rs b/src/types.rs
new file mode 100644
index 0000000..282bf79
--- /dev/null
+++ b/src/types.rs
@@ -0,0 +1,31 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ChatMessage {
+    pub role: String,
+    pub content: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ChatRequest {
+    pub model: String,
+    pub messages: Vec<ChatMessage>,
+    pub stream: bool,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ChatResponse {
+    pub model: String,
+    pub message: ChatMessage,
+    pub done: bool,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ModelInfo {
+    pub name: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ModelsResponse {
+    pub models: Vec<ModelInfo>,
+}
diff --git a/src/ui.rs b/src/ui.rs
new file mode 100644
index 0000000..8096651
--- /dev/null
+++ b/src/ui.rs
@@ -0,0 +1,284 @@
+use gtk4::prelude::*;
+use gtk4::{glib, Application, ApplicationWindow, Button, ComboBoxText, Label, ScrolledWindow, TextView, TextBuffer, TextTag, TextTagTable, Orientation, PolicyType, WrapMode, Align};
+use gtk4::Box as GtkBox;
+use glib::spawn_future_local;
+
+use crate::api;
+use crate::state::AppState;
+use crate::types::ChatMessage;
+
+pub fn build_ui(app: &Application) {
+    let window = ApplicationWindow::builder()
+        .application(app)
+        .title("Ollama Chat")
+        .default_width(900)
+        .default_height(700)
+        .build();
+
+    // Apply minimal CSS for larger fonts and spacing
+    let css_provider = gtk4::CssProvider::new();
+    css_provider.load_from_data(
+        r#"
+        window {
+            font-size: 16px;
+        }
+
+        .chat-text {
+            font-size: 16px;
+            padding: 24px;
+        }
+
+        .input-text {
+            font-size: 16px;
+            padding: 16px;
+        }
+
+        button {
+            font-size: 16px;
+            padding: 16px 24px;
+        }
+
+        combobox {
+            font-size: 16px;
+        }
+        "#
+    );
+
+    gtk4::style_context_add_provider_for_display(
+        &gtk4::prelude::WidgetExt::display(&window),
+        &css_provider,
+        gtk4::STYLE_PROVIDER_PRIORITY_APPLICATION,
+    );
+
+    // Main container with padding
+    let main_container = GtkBox::new(Orientation::Vertical, 24);
+    main_container.set_margin_top(24);
+    main_container.set_margin_bottom(24);
+    main_container.set_margin_start(24);
+    main_container.set_margin_end(24);
+
+    // Chat display area
+    let chat_scroll = ScrolledWindow::new();
+    chat_scroll.set_policy(PolicyType::Never, PolicyType::Automatic);
+    chat_scroll.set_vexpand(true);
+
+    let (chat_view, chat_buffer) = create_chat_view();
+    chat_scroll.set_child(Some(&chat_view));
+
+    // Input area
+    let input_container = GtkBox::new(Orientation::Vertical, 16);
+
+    let input_area_container = GtkBox::new(Orientation::Horizontal, 16);
+
+    let input_scroll = ScrolledWindow::new();
+    input_scroll.set_policy(PolicyType::Never, PolicyType::Automatic);
+    input_scroll.set_max_content_height(150);
+    input_scroll.set_propagate_natural_height(true);
+    input_scroll.set_hexpand(true);
+
+    let input_view = TextView::new();
+    input_view.add_css_class("input-text");
+    input_view.set_wrap_mode(WrapMode::WordChar);
+    input_view.set_accepts_tab(false);
+    let input_buffer = input_view.buffer();
+    input_scroll.set_child(Some(&input_view));
+
+    let send_button = Button::with_label("Send");
+    send_button.set_valign(Align::End);
+
+    input_area_container.append(&input_scroll);
+    input_area_container.append(&send_button);
+
+    // Bottom controls
+    let controls_container = GtkBox::new(Orientation::Horizontal, 16);
+
+    let model_label = Label::new(Some("Model:"));
+    let model_combo = ComboBoxText::new();
+    let status_label = Label::new(Some("Ready"));
+    status_label.set_hexpand(true);
+    status_label.set_halign(Align::End);
+
+    controls_container.append(&model_label);
+    controls_container.append(&model_combo);
+    controls_container.append(&status_label);
+
+    input_container.append(&input_area_container);
+    input_container.append(&controls_container);
+
+    // Assemble main UI
+    main_container.append(&chat_scroll);
+    main_container.append(&input_container);
+    window.set_child(Some(&main_container));
+
+    // Initialize app state
+    let app_state = AppState::default();
+
+    // Load available models
+    load_models(model_combo.clone(), status_label.clone(), app_state.clone());
+
+    // Set up event handlers
+    setup_send_handler(
+        send_button.clone(),
+        input_buffer,
+        chat_buffer,
+        model_combo,
+        status_label,
+        app_state,
+    );
+
+    setup_keyboard_shortcut(input_view, send_button);
+
+    window.present();
+}
+
+fn create_chat_view() -> (TextView, TextBuffer) {
+    let chat_view = TextView::new();
+    chat_view.set_editable(false);
+    chat_view.set_cursor_visible(false);
+    chat_view.set_wrap_mode(WrapMode::WordChar);
+    chat_view.add_css_class("chat-text");
+
+    let chat_buffer = TextBuffer::new(None);
+    chat_view.set_buffer(Some(&chat_buffer));
+
+    (chat_view, chat_buffer)
+}
+
+fn load_models(combo: ComboBoxText, status_label: Label, app_state: AppState) {
+    status_label.set_text("Loading models...");
+
+    let combo_weak = combo.downgrade();
+    let status_weak = status_label.downgrade();
+
+    spawn_future_local(async move {
+        match api::fetch_models(&app_state.ollama_url).await {
+            Ok(models) => {
+                if let (Some(combo), Some(status_label)) = (combo_weak.upgrade(), status_weak.upgrade()) {
+                    combo.remove_all();
+                    for model in models {
+                        combo.append_text(&model.name);
+                    }
+                    if combo.active().is_none() && combo.model().unwrap().iter_n_children(None) > 0 {
+                        combo.set_active(Some(0));
+                    }
+                    status_label.set_text("Ready");
+                }
+            }
+            Err(e) => {
+                if let Some(status_label) = status_weak.upgrade() {
+                    status_label.set_text(&format!("Error loading models: {}", e));
+                }
+            }
+        }
+    });
+}
+
+fn setup_send_handler(
+    send_button: Button,
+    input_buffer: TextBuffer,
+    chat_buffer: TextBuffer,
+    model_combo: ComboBoxText,
+    status_label: Label,
+    app_state: AppState,
+) {
+    send_button.connect_clicked(move |_| {
+        let start_iter = input_buffer.start_iter();
+        let end_iter = input_buffer.end_iter();
+        let text = input_buffer.text(&start_iter, &end_iter, false);
+
+        if text.trim().is_empty() {
+            return;
+        }
+
+        let selected_model = model_combo.active_text();
+        if selected_model.is_none() {
+            status_label.set_text("Please select a model first");
+            return;
+        }
+
+        let model = selected_model.unwrap().to_string();
+        input_buffer.delete(&mut input_buffer.start_iter(), &mut input_buffer.end_iter());
+
+        send_message(
+            text.to_string(),
+            model,
+            chat_buffer.clone(),
+            status_label.clone(),
+            app_state.clone(),
+        );
+    });
+}
+
+fn setup_keyboard_shortcut(input_view: TextView, send_button: Button) {
+    let input_controller = gtk4::EventControllerKey::new();
+    input_controller.connect_key_pressed(move |_, key, _, modifier| {
+        if key == gtk4::gdk::Key::Return && modifier.contains(gtk4::gdk::ModifierType::CONTROL_MASK) {
+            send_button.emit_clicked();
+            glib::Propagation::Stop
+        } else {
+            glib::Propagation::Proceed
+        }
+    });
+    input_view.add_controller(input_controller);
+}
+
+fn send_message(
+    message: String,
+    model: String,
+    chat_buffer: TextBuffer,
+    status_label: Label,
+    app_state: AppState,
+) {
+    // Add user message to conversation
+    {
+        let mut conversation = app_state.conversation.lock().unwrap();
+        conversation.push(ChatMessage {
+            role: "user".to_string(),
+            content: message.clone(),
+        });
+    }
+
+    append_to_chat(&chat_buffer, "You", &message);
+    status_label.set_text("Sending message...");
+
+    let buffer_weak = chat_buffer.downgrade();
+    let status_weak = status_label.downgrade();
+
+    spawn_future_local(async move {
+        match api::send_chat_request(&app_state.ollama_url, &model, &app_state.conversation).await {
+            Ok(response_text) => {
+                // Add assistant response to conversation
+                {
+                    let mut conversation = app_state.conversation.lock().unwrap();
+                    conversation.push(ChatMessage {
+                        role: "assistant".to_string(),
+                        content: response_text.clone(),
+                    });
+                }
+
+                if let (Some(chat_buffer), Some(status_label)) = (buffer_weak.upgrade(), status_weak.upgrade()) {
+                    append_to_chat(&chat_buffer, "Assistant", &response_text);
+                    status_label.set_text("Ready");
+                }
+            }
+            Err(e) => {
+                if let Some(status_label) = status_weak.upgrade() {
+                    status_label.set_text(&format!("Error: {}", e));
+                }
+            }
+        }
+    });
+}
+
+fn append_to_chat(buffer: &TextBuffer, sender: &str, message: &str) {
+    let mut end_iter = buffer.end_iter();
+
+    // Add spacing if buffer is not empty
+    if buffer.char_count() > 0 {
+        buffer.insert(&mut end_iter, "\n\n");
+        end_iter = buffer.end_iter();
+    }
+
+    // Add sender label and message
+    buffer.insert(&mut end_iter, &format!("{}:\n{}", sender, message));
+}
\ No newline at end of file
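Note: the API layer in this commit can be exercised without the GTK front end. Below is a minimal smoke-test sketch, not part of the commit, assuming an Ollama server is reachable at the default http://localhost:11434 used by AppState and that at least one model has been pulled. It would temporarily stand in for src/main.rs and only pulls in the api and types modules; the prompt text is illustrative.

// Hypothetical smoke test for src/api.rs; not part of the commit above.
// Assumes a local Ollama instance with at least one installed model.
use std::sync::{Arc, Mutex};

mod api;
mod types;

use types::ChatMessage;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same default base URL that AppState uses.
    let base_url = "http://localhost:11434";

    // GET /api/tags: list locally installed models.
    let models = api::fetch_models(base_url).await?;
    for m in &models {
        println!("model: {}", m.name);
    }

    // POST /api/chat (stream: false): send a one-message conversation
    // to the first installed model, if any.
    if let Some(model) = models.first() {
        let conversation = Arc::new(Mutex::new(vec![ChatMessage {
            role: "user".to_string(),
            content: "Hello!".to_string(),
        }]));
        let reply = api::send_chat_request(base_url, &model.name, &conversation).await?;
        println!("assistant: {}", reply);
    }

    Ok(())
}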