Files
ws-agent/crates/daemon/src/main.rs

59 lines
1.9 KiB
Rust

use genai::chat::{ChatMessage, ChatRequest};
use genai::Client;
use shared::ai::ai_daemon_server::{AiDaemon, AiDaemonServer};
use shared::ai::{PromptRequest, PromptResponse};
use tonic::{transport::Server, Request, Response, Status};
/// Stateless gRPC service handler for the `AiDaemon` service.
/// Each `prompt` call builds its own genai `Client`, so no fields are needed.
#[derive(Default)]
pub struct DaemonServer {}
#[tonic::async_trait]
impl AiDaemon for DaemonServer {
    /// Handles one `prompt` RPC: forwards the prompt text to the local
    /// "llama3.2" model and returns the model's reply in `PromptResponse`.
    ///
    /// Model errors never fail the RPC; they are reported as the response
    /// text (`"Prompt error: …"`) instead.
    async fn prompt(
        &self,
        request: Request<PromptRequest>,
    ) -> Result<Response<PromptResponse>, Status> {
        let remote_addr = request.remote_addr();
        let prompt_value = request.into_inner().prompt;
        println!("Request from {:?}: {:?}", remote_addr, prompt_value);

        // A fresh client per request; the handler itself holds no state.
        let client = Client::default();
        let response = prompt_ollama(&client, "llama3.2", prompt_value.as_str())
            .await
            .unwrap_or_else(|err| format!("Prompt error: {}", err));
        // Fixed log-message typo ("Respone" -> "Response").
        println!("Response: {}", response);

        // Field-init shorthand instead of `response: response`.
        Ok(Response::new(PromptResponse { response }))
    }
}
/// Sends `prompt` as a single user message to `model` through `client` and
/// returns the first text part of the chat response.
///
/// # Errors
/// Propagates any error from the underlying chat call. A reply that carries
/// no text content is not an error; the fallback string is returned instead.
async fn prompt_ollama(
    client: &Client,
    model: &str,
    prompt: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    let request = ChatRequest::new(vec![ChatMessage::user(prompt)]);
    let reply = client.exec_chat(model, request, None).await?;
    Ok(reply
        .first_text()
        .unwrap_or("No response content!")
        .to_string())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // IPv6 loopback only; remote clients cannot connect to this address.
    let addr_s = "[::1]:50051";
    // Propagate a malformed address via `?` instead of panicking with
    // `unwrap()` — `main` already returns `Result`, so `?` is free here.
    let addr = addr_s.parse()?;
    let daemon = DaemonServer::default();

    // gRPC reflection lets tools such as grpcurl discover the service schema.
    let reflection_service = tonic_reflection::server::Builder::configure()
        .register_encoded_file_descriptor_set(shared::ai::FILE_DESCRIPTOR_SET)
        .build_v1()?;

    // NOTE(review): printed before `serve` actually binds the socket; a bind
    // failure surfaces only through the `await?` below.
    println!("Started daemon at {}", addr_s);

    Server::builder()
        .add_service(AiDaemonServer::new(daemon))
        .add_service(reflection_service)
        .serve(addr)
        .await?;
    Ok(())
}