initial work on message history and data transfer

This commit is contained in:
2026-02-09 20:57:47 +02:00
parent e878b8120b
commit e63fd76d2f
6 changed files with 149 additions and 49 deletions

View File

@@ -1,18 +1,30 @@
mod chatpersistence;
use genai::chat::{ChatMessage, ChatRequest, ChatResponse};
use std::cell::Cell;
use std::sync::atomic::AtomicI64;
use genai::chat::{ChatMessage, ChatRequest};
use genai::Client;
use shared::ai::ai_daemon_server::{AiDaemon, AiDaemonServer};
use shared::ai::{
ChatHistoryRequest, ChatHistoryResponse, ChatRequest as CRequest, ChatResponse as CResponse,
PromptRequest, PromptResponse,
ChatHistoryRequest, ChatHistoryResponse, ChatMessage as CMessage, ChatRequest as CRequest,
ChatResponse as CResponse, PromptRequest, PromptResponse,
};
use tonic::{transport::Server, Request, Response, Status};
use chatpersistence::SqliteChatRepository;
#[derive(Default)]
pub struct DaemonServer {}
pub struct DaemonServer {
message_counter: AtomicI64,
}
impl Default for DaemonServer {
fn default() -> Self {
Self {
message_counter: AtomicI64::new(0),
}
}
}
#[tonic::async_trait]
impl AiDaemon for DaemonServer {
@@ -33,11 +45,26 @@ impl AiDaemon for DaemonServer {
}
async fn chat(&self, request: Request<CRequest>) -> Result<Response<CResponse>, Status> {
let r = request.into_inner();
println!("<<<: {}", r.text());
let response = CResponse {
id: 1,
chat_id: 1,
text: "asdf".to_string(),
messages: vec![
CMessage {
id: self
.message_counter
.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
text: r.text().to_string(),
is_user: true,
},
CMessage {
id: self
.message_counter
.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
text: format!("Pong: {}", r.text()),
is_user: false,
},
],
};
return Ok(Response::new(response));
}

View File

@@ -10,7 +10,7 @@ pub mod chatmessage {
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct MessageHistory {
pub chat_id: i64,
pub chat_id: Option<i64>,
pub history: Vec<Message>,
}
}

View File

@@ -7,13 +7,17 @@ service AiDaemon {
rpc ChatHistory(ChatHistoryRequest) returns (ChatHistoryResponse);
}
message ChatResponse {
message ChatMessage {
int64 id = 1;
int64 chat_id = 2;
string text = 10;
bool is_user = 20;
}
message ChatResponse {
int64 chat_id = 2;
repeated ChatMessage messages = 10;
}
message ChatRequest {
optional int64 chat_id = 1;
optional string text = 10;

View File

@@ -2,13 +2,14 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
use feshared::chatmessage::{Message, MessageHistory};
use shared::ai::{ai_daemon_client::AiDaemonClient, PromptRequest};
use shared::ai::{ai_daemon_client::AiDaemonClient, ChatRequest, PromptRequest};
use tauri::{Emitter, Manager, State};
use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, Modifiers, Shortcut, ShortcutState};
use tokio::sync::Mutex;
struct AppState {
grpc_client: Mutex<AiDaemonClient<tonic::transport::Channel>>,
current_chat: Mutex<Option<i64>>,
}
#[tauri::command]
@@ -41,6 +42,40 @@ async fn prompt_llm(state: State<'_, AppState>, prompt: String) -> Result<String
}
}
/// Tauri command: forwards a user prompt to the AI daemon's `chat` RPC and
/// returns the resulting messages (user echo + model reply) for the UI.
///
/// * `prompt`  - the text the user typed.
/// * `chat_id` - existing conversation id, or `None` to start a new chat
///               (the daemon decides what `None` means).
///
/// Returns the batch of `Message`s on success, or a human-readable gRPC
/// error string on failure.
#[tauri::command]
async fn chat(
    state: State<'_, AppState>,
    prompt: String,
    chat_id: Option<i64>,
) -> Result<Vec<Message>, String> {
    // Lock is held across the await; AppState uses tokio::sync::Mutex, which
    // is safe to hold across awaits (unlike std::sync::Mutex).
    let mut client = state.grpc_client.lock().await;
    let request = tonic::Request::new(ChatRequest {
        chat_id, // field-init shorthand (was `chat_id: chat_id`)
        text: Some(prompt),
    });
    match client.chat(request).await {
        Ok(response) => {
            let r = response.into_inner();
            // Debug trace of the exchange: ">>>" user, "<<<" model.
            for m in &r.messages {
                if m.is_user {
                    println!(">>> {}", m.text)
                } else {
                    println!("<<< {}", m.text)
                }
            }
            // Consume the response with `into_iter` so each message's text
            // is moved into the frontend `Message`, avoiding a String clone
            // per message.
            Ok(r.messages
                .into_iter()
                .map(|msg| Message {
                    id: msg.id,
                    text: msg.text,
                    is_user: msg.is_user,
                })
                .collect())
        }
        Err(e) => Err(format!("gRPC error: {}", e)),
    }
}
#[tauri::command]
async fn chat_history(
state: State<'_, AppState>,
@@ -48,14 +83,21 @@ async fn chat_history(
) -> Result<MessageHistory, String> {
let history = MessageHistory {
chat_id: match chat_id {
Some(id) => id,
None => -1,
Some(_) => chat_id,
None => Some(-1),
},
history: vec![Message {
history: vec![
Message {
id: 1,
text: String::from("asd"),
is_user: false,
}],
},
Message {
id: 2,
text: String::from("yeah!!!!"),
is_user: true,
},
],
};
Ok(history)
}
@@ -72,12 +114,14 @@ async fn main() {
tauri::Builder::default()
.manage(AppState {
grpc_client: Mutex::new(client),
current_chat: Mutex::new(None),
})
.plugin(tauri_plugin_global_shortcut::Builder::new().build())
.invoke_handler(tauri::generate_handler![
toggle_popup,
prompt_llm,
chat_history
chat_history,
chat,
])
.setup(|app| {
/* Auto-hide popup when focus is lost

View File

@@ -10,29 +10,42 @@ pub fn Popup() -> impl IntoView {
let prompt_input_ref = NodeRef::<Input>::new();
let (prompt_text, set_prompt_text) = signal(String::new());
let (messages, set_messages) = signal(Vec::<Message>::new());
// Action that calls the prompt daemon
let init_history = Action::new_local(|(): &()| async move {
let response = invoke(
"chat_history",
serde_wasm_bindgen::to_value(&serde_json::json!({"chat_id": 1})).unwrap(),
)
.await;
let history: MessageHistory = serde_wasm_bindgen::from_value(response).unwrap();
history
});
Effect::new(move |_| {
init_history.dispatch(());
});
Effect::new(move |_| {
if let Some(mut dat) = init_history.value().get() {
set_messages.update(|m| m.append(&mut dat.history));
}
});
// Action that calls the chat action on the daemon
let prompt_action = Action::new_local(|prompt: &String| {
let prompt = prompt.clone();
async move {
let response = invoke(
"prompt_llm",
"chat",
serde_wasm_bindgen::to_value(&serde_json::json!({"prompt": prompt})).unwrap(),
)
.await;
let result: String = serde_wasm_bindgen::from_value(response).unwrap();
let result: Vec<Message> = serde_wasm_bindgen::from_value(response).unwrap();
result
}
});
// Update the model response div with the prompt result
Effect::new(move |_| {
if let Some(result) = prompt_action.value().get() {
set_messages.update(|previous| {
previous.push(Message {
id: previous.len() as i64,
text: result,
is_user: false,
});
});
if let Some(mut result) = prompt_action.value().get() {
set_messages.update(|m| m.append(&mut result));
}
});
// Clear the prompt text-input when the window loses focus (and is hidden)
@@ -60,30 +73,17 @@ pub fn Popup() -> impl IntoView {
}
});
spawn_local(async move {
let response = invoke("chat_history", JsValue::bigint_from_str("1")).await;
let history: MessageHistory = serde_wasm_bindgen::from_value(response).unwrap();
set_messages.set(history.history.clone());
});
view! {
<main class="window-shell rounded-container">
<h3>"AI quick action"</h3>
<input
class="dark-input"
type="text"
node_ref=prompt_input_ref
placeholder="Ask Gordon AI"
placeholder="Prompt..."
autofocus
on:input=move |ev| set_prompt_text.set(event_target_value(&ev))
on:keydown=move |ev| {
if ev.key() == "Enter" {
set_messages.update(|previous| {
previous.push(Message {
id: previous.len() as i64,
text: prompt_text.get(),
is_user: true,
});
});
prompt_action.dispatch(prompt_text.get());
set_prompt_text.update(|s| *s = "".to_string());
}

View File

@@ -2,6 +2,7 @@ body {
background-color: transparent !important;
margin: 0;
color: #f0f0f0;
font-family: "Inter", sans-serif;
}
.window-shell {
@@ -22,21 +23,45 @@ body {
border-radius: 15px;
}
.dark-input {
padding: 12px 20px;
margin: 8px 0;
/* Colors & Background */
background-color: #1e1e1e;
color: #ffffff;
border: 1px solid #333333;
border-radius: 8px; /* Soft rounded corners */
/* Typography */
font-family: "Inter", sans-serif;
font-size: 16px;
/* Smooth Transition */
transition: all 0.3s ease;
outline: none;
}
.response-area {
width: 100%;
height: 300px;
overflow-y: auto;
gap: 10px;
flex-direction: column;
display: flex;
}
.msg {
margin: 1px;
border: solid 1px #808080;
background-color: #303030;
border-radius: 8px;
padding: 8px 16px;
margin: 8px 0;
animation: slideIn 0.3s ease-out;
max-width: 70%;
}
.msg-user {
text-align: end;
align-self: flex-end;
}
@keyframes slideIn {