dave: add trial mode

Fixes: #827
Signed-off-by: William Casarin <jb55@jb55.com>
Author: William Casarin
Date: 2025-05-01 19:16:41 -07:00
parent 6bbc20471a
commit 514e5748b8
4 changed files with 83 additions and 28 deletions


@@ -2,21 +2,46 @@ use async_openai::config::OpenAIConfig;
 #[derive(Debug)]
 pub struct ModelConfig {
+    pub trial: bool,
     endpoint: Option<String>,
     model: String,
     api_key: Option<String>,
 }
 
+// short-term trial key for testing
+const DAVE_TRIAL: &str = unsafe {
+    std::str::from_utf8_unchecked(&[
+        0x73, 0x6b, 0x2d, 0x70, 0x72, 0x6f, 0x6a, 0x2d, 0x54, 0x6b, 0x61, 0x48, 0x46, 0x32, 0x73,
+        0x72, 0x43, 0x59, 0x73, 0x5a, 0x62, 0x33, 0x6f, 0x6b, 0x43, 0x75, 0x61, 0x78, 0x39, 0x57,
+        0x76, 0x72, 0x41, 0x46, 0x67, 0x5f, 0x39, 0x58, 0x78, 0x35, 0x65, 0x37, 0x4b, 0x53, 0x36,
+        0x76, 0x32, 0x32, 0x51, 0x30, 0x67, 0x48, 0x61, 0x58, 0x6b, 0x67, 0x6e, 0x4e, 0x4d, 0x63,
+        0x7a, 0x69, 0x72, 0x5f, 0x44, 0x57, 0x6e, 0x7a, 0x43, 0x77, 0x52, 0x50, 0x4e, 0x50, 0x39,
+        0x6b, 0x5a, 0x79, 0x75, 0x57, 0x4c, 0x35, 0x54, 0x33, 0x42, 0x6c, 0x62, 0x6b, 0x46, 0x4a,
+        0x72, 0x66, 0x49, 0x4b, 0x31, 0x77, 0x4f, 0x67, 0x31, 0x6a, 0x37, 0x54, 0x57, 0x42, 0x5a,
+        0x67, 0x66, 0x49, 0x75, 0x30, 0x51, 0x48, 0x4e, 0x31, 0x70, 0x6a, 0x72, 0x37, 0x4b, 0x38,
+        0x55, 0x54, 0x6d, 0x34, 0x50, 0x6f, 0x65, 0x47, 0x39, 0x61, 0x35, 0x79, 0x6c, 0x78, 0x45,
+        0x4f, 0x6f, 0x74, 0x43, 0x47, 0x42, 0x36, 0x65, 0x7a, 0x59, 0x5a, 0x37, 0x70, 0x54, 0x38,
+        0x63, 0x44, 0x75, 0x66, 0x75, 0x36, 0x52, 0x4d, 0x6b, 0x6c, 0x2d, 0x44, 0x51, 0x41,
+    ])
+};
+
 impl Default for ModelConfig {
     fn default() -> Self {
+        let api_key = std::env::var("DAVE_API_KEY")
+            .ok()
+            .or(std::env::var("OPENAI_API_KEY").ok());
+
+        // trial mode?
+        let trial = api_key.is_none();
+        let api_key = api_key.or(Some(DAVE_TRIAL.to_string()));
+
         ModelConfig {
+            trial,
             endpoint: std::env::var("DAVE_ENDPOINT").ok(),
             model: std::env::var("DAVE_MODEL")
                 .ok()
                 .unwrap_or("gpt-4o".to_string()),
-            api_key: std::env::var("DAVE_API_KEY")
-                .ok()
-                .or(std::env::var("OPENAI_API_KEY").ok()),
+            api_key,
         }
     }
 }
@@ -28,6 +53,7 @@ impl ModelConfig {
     pub fn ollama() -> Self {
         ModelConfig {
+            trial: false,
             endpoint: std::env::var("OLLAMA_HOST").ok().map(|h| h + "/v1"),
             model: "hhao/qwen2.5-coder-tools:latest".to_string(),
             api_key: None,
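
For context outside the diff: the change above makes key resolution prefer DAVE_API_KEY, fall back to OPENAI_API_KEY, and only switch into trial mode (using the baked-in DAVE_TRIAL key) when neither is set. A minimal standalone sketch of that logic, with a hypothetical placeholder constant standing in for the embedded key:

// Sketch only; TRIAL_KEY stands in for the DAVE_TRIAL constant above.
const TRIAL_KEY: &str = "sk-trial-placeholder";

fn resolve_api_key() -> (bool, Option<String>) {
    // A user-supplied key always wins over the trial key.
    let api_key = std::env::var("DAVE_API_KEY")
        .ok()
        .or_else(|| std::env::var("OPENAI_API_KEY").ok());

    // Trial mode is active only when no key was configured.
    let trial = api_key.is_none();
    let api_key = api_key.or_else(|| Some(TRIAL_KEY.to_string()));

    (trial, api_key)
}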


@@ -124,6 +124,8 @@ You are an AI agent for the nostr protocol called Dave, created by Damus. nostr
                avatar.random_nudge();
            }
            match res {
+                DaveApiResponse::Failed(err) => self.chat.push(Message::Error(err)),
+
                DaveApiResponse::Token(token) => match self.chat.last_mut() {
                    Some(Message::Assistant(msg)) => *msg = msg.clone() + &token,
                    Some(_) => self.chat.push(Message::Assistant(token)),
@@ -175,7 +177,7 @@ You are an AI agent for the nostr protocol called Dave, created by Damus. nostr
     }
 
     fn ui(&mut self, app_ctx: &mut AppContext, ui: &mut egui::Ui) -> DaveResponse {
-        DaveUi::new(&self.chat, &mut self.input).ui(app_ctx, ui)
+        DaveUi::new(self.model_config.trial, &self.chat, &mut self.input).ui(app_ctx, ui)
     }
 
     fn handle_new_chat(&mut self) {
@@ -195,7 +197,7 @@ You are an AI agent for the nostr protocol called Dave, created by Damus. nostr
             let txn = Transaction::new(app_ctx.ndb).expect("txn");
             self.chat
                 .iter()
-                .map(|c| c.to_api_msg(&txn, app_ctx.ndb))
+                .filter_map(|c| c.to_api_msg(&txn, app_ctx.ndb))
                 .collect()
         };
         tracing::debug!("sending messages, latest: {:?}", messages.last().unwrap());
@@ -242,6 +244,7 @@ You are an AI agent for the nostr protocol called Dave, created by Damus. nostr
                Ok(token) => token,
                Err(err) => {
                    tracing::error!("failed to get token: {err}");
+                    let _ = tx.send(DaveApiResponse::Failed(err.to_string()));
                    return;
                }
            };
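
The error path added in this file is a plain channel hand-off: the streaming worker reports the failure through the response channel instead of returning silently, and the UI loop turns it into a chat entry. A simplified sketch under assumed names (the real message types live in the next file; the hunks above don't show which channel type the project actually uses, so std::sync::mpsc here is only illustrative):

use std::sync::mpsc;

enum DaveApiResponse {
    Token(String),
    Failed(String),
}

enum Message {
    Assistant(String),
    Error(String),
}

fn worker(tx: mpsc::Sender<DaveApiResponse>) {
    // On a stream error, report it; the receiver may already be gone, so ignore send errors.
    let _ = tx.send(DaveApiResponse::Failed("failed to get token".to_string()));
}

fn drain(rx: &mpsc::Receiver<DaveApiResponse>, chat: &mut Vec<Message>) {
    while let Ok(res) = rx.try_recv() {
        match res {
            DaveApiResponse::Failed(err) => chat.push(Message::Error(err)),
            DaveApiResponse::Token(tok) => chat.push(Message::Assistant(tok)),
        }
    }
}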


@@ -5,6 +5,7 @@ use nostrdb::{Ndb, Transaction};
 #[derive(Debug, Clone)]
 pub enum Message {
     System(String),
+    Error(String),
     User(String),
     Assistant(String),
     ToolCalls(Vec<ToolCall>),
@@ -16,6 +17,7 @@ pub enum Message {
 pub enum DaveApiResponse {
     ToolCalls(Vec<ToolCall>),
     Token(String),
+    Failed(String),
 }
 
 impl Message {
@@ -23,45 +25,49 @@ impl Message {
         Self::ToolResponse(ToolResponse::error(id, msg))
     }
 
-    pub fn to_api_msg(&self, txn: &Transaction, ndb: &Ndb) -> ChatCompletionRequestMessage {
+    pub fn to_api_msg(&self, txn: &Transaction, ndb: &Ndb) -> Option<ChatCompletionRequestMessage> {
         match self {
-            Message::User(msg) => {
-                ChatCompletionRequestMessage::User(ChatCompletionRequestUserMessage {
+            Message::Error(_err) => None,
+
+            Message::User(msg) => Some(ChatCompletionRequestMessage::User(
+                ChatCompletionRequestUserMessage {
                     name: None,
                     content: ChatCompletionRequestUserMessageContent::Text(msg.clone()),
-                })
-            }
+                },
+            )),
 
-            Message::Assistant(msg) => {
-                ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage {
+            Message::Assistant(msg) => Some(ChatCompletionRequestMessage::Assistant(
+                ChatCompletionRequestAssistantMessage {
                     content: Some(ChatCompletionRequestAssistantMessageContent::Text(
                         msg.clone(),
                     )),
                     ..Default::default()
-                })
-            }
+                },
+            )),
 
-            Message::System(msg) => {
-                ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage {
+            Message::System(msg) => Some(ChatCompletionRequestMessage::System(
+                ChatCompletionRequestSystemMessage {
                     content: ChatCompletionRequestSystemMessageContent::Text(msg.clone()),
                     ..Default::default()
-                })
-            }
+                },
+            )),
 
-            Message::ToolCalls(calls) => {
-                ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage {
+            Message::ToolCalls(calls) => Some(ChatCompletionRequestMessage::Assistant(
+                ChatCompletionRequestAssistantMessage {
                     tool_calls: Some(calls.iter().map(|c| c.to_api()).collect()),
                     ..Default::default()
-                })
-            }
+                },
+            )),
 
             Message::ToolResponse(resp) => {
                 let tool_response = resp.responses().format_for_dave(txn, ndb);
 
-                ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage {
-                    tool_call_id: resp.id().to_owned(),
-                    content: ChatCompletionRequestToolMessageContent::Text(tool_response),
-                })
+                Some(ChatCompletionRequestMessage::Tool(
+                    ChatCompletionRequestToolMessage {
+                        tool_call_id: resp.id().to_owned(),
+                        content: ChatCompletionRequestToolMessageContent::Text(tool_response),
+                    },
+                ))
             }
         }
     }
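
With to_api_msg now returning Option, Message::Error stays UI-only: it renders in the chat but maps to None and is dropped by the filter_map call shown in the previous file, so it never reaches the model. A toy, self-contained version of that pattern (stand-in types, not the async_openai ones):

// Stand-in message type; the real code maps to ChatCompletionRequestMessage.
enum ChatMsg {
    User(String),
    Error(String),
}

fn to_api_msg(msg: &ChatMsg) -> Option<String> {
    match msg {
        // UI-only rows contribute nothing to the API payload.
        ChatMsg::Error(_) => None,
        ChatMsg::User(text) => Some(format!("user: {text}")),
    }
}

fn main() {
    let chat = vec![
        ChatMsg::User("hello dave".into()),
        ChatMsg::Error("stream failed".into()),
    ];
    let payload: Vec<String> = chat.iter().filter_map(to_api_msg).collect();
    assert_eq!(payload.len(), 1); // the error row was dropped
}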


@@ -10,6 +10,7 @@ use notedeck_ui::{icons::search_icon, NoteOptions, ProfilePic};
 /// DaveUi holds all of the data it needs to render itself
 pub struct DaveUi<'a> {
     chat: &'a [Message],
+    trial: bool,
     input: &'a mut String,
 }
@@ -59,8 +60,8 @@ pub enum DaveAction {
 }
 
 impl<'a> DaveUi<'a> {
-    pub fn new(chat: &'a [Message], input: &'a mut String) -> Self {
-        DaveUi { chat, input }
+    pub fn new(trial: bool, chat: &'a [Message], input: &'a mut String) -> Self {
+        DaveUi { trial, chat, input }
     }
 
     fn chat_margin(ctx: &egui::Context) -> i8 {
@@ -141,11 +142,30 @@ impl<'a> DaveUi<'a> {
             .or(DaveResponse { action })
     }
 
+    fn error_chat(&self, err: &str, ui: &mut egui::Ui) {
+        if self.trial {
+            ui.add(egui::Label::new(
+                egui::RichText::new(
+                    "The Dave Nostr AI assistant trial has ended :(. Thanks for testing! Zap-enabled Dave coming soon!",
+                )
+                .weak(),
+            ));
+        } else {
+            ui.add(egui::Label::new(
+                egui::RichText::new(format!("An error occured: {err}")).weak(),
+            ));
+        }
+    }
+
     /// Render a chat message (user, assistant, tool call/response, etc)
     fn render_chat(&self, ctx: &mut AppContext, ui: &mut egui::Ui) -> Option<NoteAction> {
         let mut action: Option<NoteAction> = None;
         for message in self.chat {
             let r = match message {
+                Message::Error(err) => {
+                    self.error_chat(err, ui);
+                    None
+                }
                 Message::User(msg) => {
                     self.user_chat(msg, ui);
                     None