Compare commits


2 commits

Author SHA1 Message Date
Michael Zhang 2d424d763f move to ui 2024-06-18 14:16:38 -05:00
Michael Zhang b08f584ab0 rip out cozo 2024-06-17 15:08:03 -04:00
74 changed files with 1244 additions and 1300 deletions

Cargo.lock (generated): 1216 lines changed

File diff suppressed because it is too large.

@@ -7,12 +7,12 @@ edition = "2021"
 backoff = { version = "0.4.0", features = ["tokio"] }
 bimap = "0.6.3"
 chrono = { version = "0.4.38", features = ["serde"] }
-cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
 futures = "0.3.30"
 itertools = "0.13.0"
 miette = { version = "5.5.0", features = ["fancy", "backtrace"] }
 serde = { version = "1.0.203", features = ["derive"] }
 serde_json = "1.0.117"
+sqlx = { version = "0.7.4", features = ["runtime-tokio", "tls-rustls", "macros", "sqlite", "uuid", "chrono", "regexp"] }
 sugars = "3.0.1"
 tantivy = { version = "0.22.0", features = ["zstd"] }
 tokio = { version = "1.38.0", features = ["full"] }


@@ -0,0 +1,3 @@
+fn main() {
+  println!("cargo:rerun-if-changed=migrations");
+}
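This build script matters because sqlx's migrate! macro embeds everything under migrations/ into the binary at compile time; the rerun-if-changed line tells Cargo to rebuild whenever a migration file changes, so the embedded set stays current. A minimal sketch of the pairing, reusing the MIGRATOR name that appears later in this diff (the connection helper is illustrative, not repo code):

use sqlx::{migrate::Migrator, SqlitePool};

// Embeds every file under ./migrations into the binary at compile time.
static MIGRATOR: Migrator = sqlx::migrate!();

// Hypothetical helper: connect, then apply any migrations not yet recorded
// in sqlx's _sqlx_migrations bookkeeping table.
async fn init_db(url: &str) -> Result<SqlitePool, Box<dyn std::error::Error>> {
  let pool = SqlitePool::connect(url).await?;
  MIGRATOR.run(&pool).await?;
  Ok(pool)
}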


@@ -0,0 +1,6 @@
+CREATE TABLE "node" (
+  id TEXT PRIMARY KEY,
+  type TEXT,
+  updated_at DATETIME DEFAULT NOW(),
+  extra_data JSON
+);
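One caveat worth flagging: SQLite has no NOW() function, so a default written this way is typically rejected at table creation; CURRENT_TIMESTAMP, or binding the value explicitly, is the usual route. A quick sketch of writing a row into this table through sqlx, with the helper name and setup being illustrative rather than repo code:

use sqlx::SqlitePool;
use uuid::Uuid;

// Hypothetical helper: insert a node row, storing extra_data as JSON text
// and setting updated_at explicitly rather than relying on a default.
async fn insert_node(
  pool: &SqlitePool,
  ty: &str,
  extra: &serde_json::Value,
) -> sqlx::Result<String> {
  let id = Uuid::now_v7().to_string();
  sqlx::query(
    "INSERT INTO node (id, type, updated_at, extra_data)
     VALUES (?, ?, datetime('now'), ?)",
  )
  .bind(&id)
  .bind(ty)
  .bind(extra.to_string())
  .execute(pool)
  .await?;
  Ok(id)
}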


@@ -8,7 +8,8 @@ extern crate sugars;

 pub mod migrations;
 pub mod state;
-pub mod mail;
+// pub mod mail;
+pub mod messaging;

 #[cfg(test)]
 mod tests;


@@ -1,160 +1,291 @@
-use std::{collections::HashMap, time::Duration};
+use std::{
+  collections::{HashMap, HashSet},
+  time::Duration,
+};

+use async_imap::Session;
 use backoff::{exponential::ExponentialBackoff, SystemClock};
-use cozo::{DataValue, JsonData, ScriptMutability};
 use futures::TryStreamExt;
-use miette::{IntoDiagnostic, Result};
+use itertools::Itertools;
+use miette::{Context, IntoDiagnostic, Result};
 use tokio::{net::TcpStream, time::sleep};
 use uuid::Uuid;

-use crate::AppState;
+use crate::{mail, AppState};

-pub async fn mail_loop(state: AppState) -> Result<()> {
-  backoff::future::retry(
-    ExponentialBackoff::<SystemClock>::default(),
-    || async {
-      mail_loop_inner(&state).await?;
-
-      // For now, just sleep 30 seconds and then fetch again
-      // TODO: Run a bunch of connections at once and do IDLE over them (if possible)
-      sleep(Duration::from_secs(30)).await;
-
-      Ok(())
-    },
-  )
-  .await?;
-
-  Ok(())
-}
-
-async fn mail_loop_inner(state: &AppState) -> Result<()> {
-  // Fetch the mail configs
-  let configs = state.fetch_mail_configs()?;
-  if configs.len() == 0 {
-    return Ok(());
-  }
-
-  // TODO: Do all configs instead of just the first
-  let config = &configs[0];
-
-  let stream =
-    TcpStream::connect((config.imap_hostname.as_str(), config.imap_port))
-      .await
-      .into_diagnostic()?;
-
-  let client = async_imap::Client::new(stream);
-  let mut session = client
-    .login(&config.imap_username, &config.imap_password)
-    .await
-    .map_err(|(err, _)| err)
-    .into_diagnostic()?;
-
-  // println!("Session: {:?}", session);
-
-  let mailboxes = session
-    .list(None, Some("*"))
-    .await
-    .into_diagnostic()?
-    .try_collect::<Vec<_>>()
-    .await
-    .into_diagnostic()?;
-
-  let mailbox_names =
-    mailboxes.iter().map(|name| name.name()).collect::<Vec<_>>();
-  println!("mailboxes: {mailbox_names:?}");
-
-  // Get the mailbox with INBOX
-  let inbox_node_id = {
-    let result = state.db.run_script("
-      ?[node_id] :=
-        *mailbox{node_id, account_node_id, mailbox_name},
-        account_node_id = $account_node_id,
-        mailbox_name = 'INBOX'
-    ", btmap! {"account_node_id".to_owned()=>DataValue::from(config.node_id.to_string())}, ScriptMutability::Immutable)?;
-
-    if result.rows.len() == 0 {
-      let new_node_id = Uuid::now_v7();
-      let new_node_id = new_node_id.to_string();
-      state.db.run_script("
-        ?[node_id, account_node_id, mailbox_name] <-
-          [[$new_node_id, $account_node_id, 'INBOX']]
-        :put mailbox { node_id, account_node_id, mailbox_name }
-        ",
-        btmap! {
-          "new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
-          "account_node_id".to_owned() => DataValue::from(config.node_id.to_string()),
-        },
-        ScriptMutability::Mutable)?;
-      new_node_id
-    } else {
-      result.rows[0][0].get_str().unwrap().to_owned()
-    }
-  };
-  println!("INBOX: {:?}", inbox_node_id);
-
-  let inbox = session.select("INBOX").await.into_diagnostic()?;
-  println!("last unseen: {:?}", inbox.unseen);
-
-  let messages = session
-    .fetch(
-      "1:4",
-      "(FLAGS ENVELOPE BODY[HEADER] BODY[TEXT] INTERNALDATE)",
-    )
-    .await
-    .into_diagnostic()?
-    .try_collect::<Vec<_>>()
-    .await
-    .into_diagnostic()?;
-  println!(
-    "messages {:?}",
-    messages.iter().map(|f| f.body()).collect::<Vec<_>>()
-  );
-
-  let input_data = DataValue::List(
-    messages
-      .iter()
-      .map(|msg| {
-        let message_id = Uuid::now_v7();
-        let headers =
-          String::from_utf8(msg.header().unwrap().to_vec()).unwrap();
-        let headers = headers
-          .split("\r\n")
-          .filter_map(|s| {
-            let p = s.split(": ").collect::<Vec<_>>();
-            if p.len() < 2 {
-              None
-            } else {
-              Some((p[0], p[1]))
-            }
-          })
-          .collect::<HashMap<_, _>>();
-        DataValue::List(vec![
-          DataValue::from(message_id.to_string()),
-          DataValue::from(config.node_id.to_string()),
-          DataValue::from(inbox_node_id.clone()),
-          DataValue::from(
-            headers
-              .get("Subject")
-              .map(|s| (*s).to_owned())
-              .unwrap_or("Subject".to_owned()),
-          ),
-          DataValue::Json(JsonData(serde_json::to_value(headers).unwrap())),
-          DataValue::Bytes(msg.text().unwrap().to_vec()),
-          DataValue::from(msg.internal_date().unwrap().to_rfc3339()),
-        ])
-      })
-      .collect(),
-  );
-
-  state.db.run_script(
-    "
-    ?[node_id, account_node_id, mailbox_node_id, subject, headers, body, internal_date] <- $input_data
-    :put message { node_id, account_node_id, mailbox_node_id, subject, headers, body, internal_date }
-    ",
-    btmap! {
-      "input_data".to_owned() => input_data,
-    },
-    ScriptMutability::Mutable,
-  )?;
-
-  session.logout().await.into_diagnostic()?;
-
-  Ok(())
-}
+pub struct MailWorker {
+  state: AppState,
+}
+
+impl MailWorker {
+  pub fn new(state: AppState) -> MailWorker {
+    MailWorker { state }
+  }
+
+  pub async fn mail_loop(self) -> Result<()> {
+    loop {
+      let mut policy = ExponentialBackoff::<SystemClock>::default();
+      policy.current_interval = Duration::from_secs(5);
+      policy.initial_interval = Duration::from_secs(5);
+
+      backoff::future::retry(policy, || async {
+        match self.mail_loop_inner().await {
+          Ok(_) => {}
+          Err(err) => {
+            eprintln!("Mail error: {:?}", err);
+            Err(err)?;
+          }
+        }
+
+        // For now, just sleep 30 seconds and then fetch again
+        // TODO: Run a bunch of connections at once and do IDLE over them (if possible)
+        sleep(Duration::from_secs(30)).await;
+
+        Ok(())
+      })
+      .await?;
+    }
+
+    Ok(())
+  }
+
+  async fn mail_loop_inner(&self) -> Result<()> {
+    // Fetch the mail configs
+    let configs = self.state.fetch_mail_configs()?;
+    if configs.is_empty() {
+      return Ok(());
+    }
+
+    // TODO: Do all configs instead of just the first
+    let config = &configs[0];
+
+    let stream =
+      TcpStream::connect((config.imap_hostname.as_str(), config.imap_port))
+        .await
+        .into_diagnostic()?;
+
+    let client = async_imap::Client::new(stream);
+    let mut session = client
+      .login(&config.imap_username, &config.imap_password)
+      .await
+      .map_err(|(err, _)| err)
+      .into_diagnostic()?;
+
+    let all_mailbox_ids = self
+      .fetch_and_store_all_mailboxes(config.node_id.to_string(), &mut session)
+      .await
+      .context("Could not fetch mailboxes")?;
+
+    self
+      .fetch_all_mail_from_single_mailbox(
+        &mut session,
+        &all_mailbox_ids,
+        config.node_id.to_string(),
+        "INBOX",
+      )
+      .await
+      .context("Could not fetch mail from INBOX")?;
+
+    session.logout().await.into_diagnostic()?;
+
+    Ok(())
+  }
+
+  async fn fetch_and_store_all_mailboxes(
+    &self,
+    config_node_id: String,
+    session: &mut Session<TcpStream>,
+  ) -> Result<HashMap<String, String>> {
+    // println!("Session: {:?}", session);
+    let mailboxes = session
+      .list(None, Some("*"))
+      .await
+      .into_diagnostic()?
+      .try_collect::<Vec<_>>()
+      .await
+      .into_diagnostic()?;
+
+    let mut all_mailboxes = HashMap::new();
+
+    // TODO: Make this more efficient by using bulk in query
+    for mailbox in mailboxes {
+      let tx = self.state.db.multi_transaction(true);
+
+      let result = tx.run_script(
+        "
+        ?[node_id] :=
+          *mailbox{node_id, account_node_id, mailbox_name},
+          account_node_id = $account_node_id,
+          mailbox_name = $mailbox_name,
+        ",
+        btmap! {
+          "account_node_id".to_owned()=>DataValue::from(config_node_id.clone()),
+          "mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
+        },
+      )?;
+
+      let node_id = if result.rows.len() == 0 {
+        let new_node_id = Uuid::now_v7();
+        let new_node_id = new_node_id.to_string();
+
+        let extra_data = json!({
+          "name": mailbox.name(),
+        });
+
+        tx.run_script("
+          ?[node_id, account_node_id, mailbox_name, extra_data] <-
+            [[$new_node_id, $account_node_id, $mailbox_name, $extra_data]]
+          :put mailbox { node_id, account_node_id, mailbox_name, extra_data }
+          ",
+          btmap! {
+            "new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
+            "account_node_id".to_owned() => DataValue::from(config_node_id.clone()),
+            "mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
+            "extra_data".to_owned()=>DataValue::Json(JsonData(extra_data)),
+          },
+        )?;
+
+        new_node_id
+      } else {
+        result.rows[0][0].get_str().unwrap().to_owned()
+      };
+
+      tx.commit()?;
+
+      all_mailboxes.insert(mailbox.name().to_owned(), node_id);
+    }
+
+    // println!("All mailboxes: {:?}", all_mailboxes);
+
+    Ok(all_mailboxes)
+  }
+
+  async fn fetch_all_mail_from_single_mailbox(
+    &self,
+    session: &mut Session<TcpStream>,
+    all_mailbox_ids: &HashMap<String, String>,
+    config_node_id: String,
+    mailbox_name: impl AsRef<str>,
+  ) -> Result<()> {
+    let mailbox_name = mailbox_name.as_ref();
+    let mailbox = session.select(mailbox_name).await.into_diagnostic()?;
+    let mailbox_node_id = all_mailbox_ids.get(mailbox_name).unwrap();
+
+    let extra_data = json!({
+      "uid_validity": mailbox.uid_validity,
+      "last_seen": mailbox.unseen,
+    });
+    // TODO: Validate uid validity here
+
+    let all_uids = session
+      .uid_search("ALL")
+      .await
+      .into_diagnostic()
+      .context("Could not fetch all UIDs")?;
+    println!("All UIDs ({}): {:?}", all_uids.len(), all_uids);
+
+    let messages = session
+      .uid_fetch(
+        all_uids.iter().join(","),
+        "(FLAGS ENVELOPE BODY[HEADER] BODY[TEXT] INTERNALDATE)",
+      )
+      .await
+      .into_diagnostic()?
+      .try_collect::<Vec<_>>()
+      .await
+      .into_diagnostic()
+      .context("Could not fetch messages")?;
+    println!(
+      "messages {:?}",
+      messages.iter().map(|f| f.body()).collect::<Vec<_>>()
+    );
+
+    let mut unique_message_ids = HashSet::new();
+    let data: Vec<_> = messages
+      .iter()
+      .map(|msg| {
+        let message_node_id = Uuid::now_v7();
+        let headers =
+          String::from_utf8(msg.header().unwrap().to_vec()).unwrap();
+        let headers = headers
+          .split("\r\n")
+          .filter_map(|s| {
+            // This is really bad lmao
+            let p = s.split(": ").collect::<Vec<_>>();
+            if p.len() < 2 {
+              None
+            } else {
+              Some((p[0], p[1..].join(": ")))
+            }
+          })
+          .collect::<HashMap<_, _>>();
+        let message_id = headers
+          .get("Message-ID")
+          .map(|s| (*s).to_owned())
+          .unwrap_or(message_node_id.to_string());
+        unique_message_ids.insert(message_id.clone());
+        DataValue::List(vec![
+          DataValue::from(message_node_id.to_string()),
+          DataValue::from(config_node_id.to_string()),
+          DataValue::from(mailbox_node_id.clone()),
+          DataValue::from(
+            headers
+              .get("Subject")
+              .map(|s| (*s).to_owned())
+              .unwrap_or("Subject".to_owned()),
+          ),
+          DataValue::Json(JsonData(serde_json::to_value(&headers).unwrap())),
+          DataValue::Bytes(msg.text().unwrap().to_vec()),
+          DataValue::from(msg.internal_date().unwrap().to_rfc3339()),
+          DataValue::from(message_id),
+        ])
+      })
+      .collect();

+    println!("Adding {} messages to database...", data.len());
+    let input_data = DataValue::List(data);
+
+    // TODO: Can this be one query?
+    let tx = self.state.db.multi_transaction(true);
+    let unique_message_ids_data_value = DataValue::List(
+      unique_message_ids
+        .into_iter()
+        .map(|s| DataValue::from(s))
+        .collect_vec(),
+    );
+
+    let existing_ids = tx.run_script(
+      "
+      ?[node_id] := *message { node_id, message_id },
+        is_in(message_id, $message_ids)
+      ",
+      btmap! { "message_ids".to_owned() => unique_message_ids_data_value },
+    )?;
+    println!("Existing ids: {:?}", existing_ids);
+
+    self
+      .state
+      .db
+      .run_script(
+        "
+        ?[
+          node_id, account_node_id, mailbox_node_id, subject, headers, body,
+          internal_date, message_id
+        ] <- $input_data
+        :put message {
+          node_id, account_node_id, mailbox_node_id, subject, headers, body,
+          internal_date, message_id
+        }
+        ",
+        btmap! {
+          "input_data".to_owned() => input_data,
+        },
+        ScriptMutability::Mutable,
+      )
+      .context("Could not add message to database")?;
+
+    Ok(())
+  }
+}
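The shape of the new worker is worth calling out: each pass of the outer loop builds a fresh backoff policy, retries the inner sync with exponential delay on error, and a successful pass sleeps 30 seconds before returning Ok, which ends that retry round and lets the loop start over. A standalone sketch of that pattern with the backoff and tokio crates (error type simplified; do_sync stands in for the real IMAP fetch):

use std::time::Duration;

use backoff::{exponential::ExponentialBackoff, SystemClock};
use tokio::time::sleep;

async fn do_sync() -> Result<(), String> {
  // Stand-in for the real fetch work.
  Ok(())
}

async fn poll_forever() {
  loop {
    // Fresh policy per round so the delay resets after a successful pass.
    let mut policy = ExponentialBackoff::<SystemClock>::default();
    policy.initial_interval = Duration::from_secs(5);
    policy.current_interval = Duration::from_secs(5);

    let result = backoff::future::retry(policy, || async {
      do_sync().await.map_err(backoff::Error::transient)?;

      // A successful pass waits before ending this retry round with Ok.
      sleep(Duration::from_secs(30)).await;
      Ok(())
    })
    .await;

    if let Err(err) = result {
      eprintln!("mail sync round gave up: {err:?}");
    }
  }
}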


@@ -0,0 +1,4 @@
+//! Panorama uses an internal messaging system to pass content around
+//!
+//! This implementation is dead simple, just passes all messages and filters on the other end
+pub struct Messaging {}
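Given the doc comment's stated design (deliver everything, filter at the receiving end), a minimal version could be little more than a wrapper over a tokio broadcast channel. This is an illustration of that idea only, not the repo's implementation; every name below is hypothetical:

use tokio::sync::broadcast;

// Every message goes to every subscriber; receivers filter by topic.
#[derive(Clone, Debug)]
pub struct Message {
  pub topic: String,
  pub body: String,
}

pub struct Messaging {
  tx: broadcast::Sender<Message>,
}

impl Messaging {
  pub fn new(capacity: usize) -> Self {
    let (tx, _rx) = broadcast::channel(capacity);
    Messaging { tx }
  }

  pub fn send(&self, msg: Message) {
    // A send error only means no subscriber is currently listening.
    let _ = self.tx.send(msg);
  }

  pub fn subscribe(&self) -> broadcast::Receiver<Message> {
    self.tx.subscribe()
  }
}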


@@ -1,196 +1,200 @@
-use cozo::DbInstance;
 use miette::{IntoDiagnostic, Result};
+use sqlx::migrate::Migrator;
 use serde_json::Value;

 use crate::ensure_ok;

-pub async fn run_migrations(db: &DbInstance) -> Result<()> {
-  let migration_status = check_migration_status(db).await?;
-  println!("migration status: {:?}", migration_status);
+pub static MIGRATOR: Migrator = sqlx::migrate!();

-  let migrations: Vec<Box<dyn for<'a> Fn(&'a DbInstance) -> Result<()>>> =
-    vec![Box::new(no_op), Box::new(migration_01)];
+// pub async fn run_migrations(db: &DbInstance) -> Result<()> {
+//   let migration_status = check_migration_status(db).await?;
+//   println!("migration status: {:?}", migration_status);

-  if let MigrationStatus::NoMigrations = migration_status {
-    let result = db.run_script_str(
-      "
-      { :create migrations { yeah: Int default 0 => version: Int default 0 } }
-      {
-        ?[yeah, version] <- [[0, 0]]
-        :put migrations { yeah, version }
-      }
-      ",
-      "",
-      false,
-    );
-    ensure_ok(&result)?;
-  }
+//   let migrations: Vec<Box<dyn for<'a> Fn(&'a DbInstance) -> Result<()>>> =
+//     vec![Box::new(no_op), Box::new(migration_01)];

-  let start_at_migration = match migration_status {
-    MigrationStatus::NoMigrations => 0,
-    MigrationStatus::HasVersion(n) => n,
-  };
-  let migrations_to_run = migrations
-    .iter()
-    .enumerate()
-    .skip(start_at_migration as usize + 1);
-  // println!("running {} migrations...", migrations_to_run.len());
+//   if let MigrationStatus::NoMigrations = migration_status {
+//     let result = db.run_script_str(
+//       "
+//       { :create migrations { yeah: Int default 0 => version: Int default 0 } }
+//       {
+//         ?[yeah, version] <- [[0, 0]]
+//         :put migrations { yeah, version }
+//       }
+//       ",
+//       "",
+//       false,
+//     );
+//     ensure_ok(&result)?;
+//   }

-  //TODO: This should all be done in a transaction
-  for (idx, migration) in migrations_to_run {
-    println!("running migration {idx}...");
+//   let start_at_migration = match migration_status {
+//     MigrationStatus::NoMigrations => 0,
+//     MigrationStatus::HasVersion(n) => n,
+//   };
+//   let migrations_to_run = migrations
+//     .iter()
+//     .enumerate()
+//     .skip(start_at_migration as usize + 1);
+//   // println!("running {} migrations...", migrations_to_run.len());

-    migration(db)?;
+//   //TODO: This should all be done in a transaction
+//   for (idx, migration) in migrations_to_run {
+//     println!("running migration {idx}...");

-    let result = db.run_script_str(
-      "
-      ?[yeah, version] <- [[0, $version]]
-      :put migrations { yeah => version }
-      ",
-      &format!("{{\"version\":{}}}", idx),
-      false,
-    );
+//     migration(db)?;

-    ensure_ok(&result)?;
+//     let result = db.run_script_str(
+//       "
+//       ?[yeah, version] <- [[0, $version]]
+//       :put migrations { yeah => version }
+//       ",
+//       &format!("{{\"version\":{}}}", idx),
+//       false,
+//     );

-    println!("succeeded migration {idx}!");
-  }
+//     ensure_ok(&result)?;

-  Ok(())
-}
+//     println!("succeeded migration {idx}!");
+//   }

-#[derive(Debug)]
-enum MigrationStatus {
-  NoMigrations,
-  HasVersion(u64),
-}
+//   Ok(())
+// }

-async fn check_migration_status(db: &DbInstance) -> Result<MigrationStatus> {
-  let status = db.run_script_str(
-    "
-    ?[yeah, version] := *migrations[yeah, version]
-    ",
-    "",
-    true,
-  );
-  println!("Status: {}", status);
+// #[derive(Debug)]
+// enum MigrationStatus {
+//   NoMigrations,
+//   HasVersion(u64),
+// }

-  let status: Value = serde_json::from_str(&status).into_diagnostic()?;
-  let status = status.as_object().unwrap();
-  let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
-  if !ok {
-    let status_code = status.get("code").unwrap().as_str().unwrap();
-    if status_code == "query::relation_not_found" {
-      return Ok(MigrationStatus::NoMigrations);
-    }
-  }
+// async fn check_migration_status(db: &DbInstance) -> Result<MigrationStatus> {
+//   let status = db.run_script_str(
+//     "
+//     ?[yeah, version] := *migrations[yeah, version]
+//     ",
+//     "",
+//     true,
+//   );
+//   println!("Status: {}", status);

-  let rows = status.get("rows").unwrap().as_array().unwrap();
-  let row = rows[0].as_array().unwrap();
-  let version = row[1].as_number().unwrap().as_u64().unwrap();
-  println!("row: {row:?}");
+//   let status: Value = serde_json::from_str(&status).into_diagnostic()?;
+//   let status = status.as_object().unwrap();
+//   let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
+//   if !ok {
+//     let status_code = status.get("code").unwrap().as_str().unwrap();
+//     if status_code == "query::relation_not_found" {
+//       return Ok(MigrationStatus::NoMigrations);
+//     }
+//   }

-  Ok(MigrationStatus::HasVersion(version))
-}
+//   let rows = status.get("rows").unwrap().as_array().unwrap();
+//   let row = rows[0].as_array().unwrap();
+//   let version = row[1].as_number().unwrap().as_u64().unwrap();
+//   println!("row: {row:?}");

-fn no_op(_: &DbInstance) -> Result<()> {
-  Ok(())
-}
+//   Ok(MigrationStatus::HasVersion(version))
+// }

-fn migration_01(db: &DbInstance) -> Result<()> {
-  let result = db.run_script_str(
-    "
-    # Primary node type
-    {
-      :create node {
-        id: String
-        =>
-        type: String,
-        created_at: Float default now(),
-        updated_at: Float default now(),
-        extra_data: Json default {},
-      }
-    }
+// fn no_op(_: &DbInstance) -> Result<()> {
+//   Ok(())
+// }

-    # Inverse mappings for easy querying
-    { :create node_has_key { key: String => id: String } }
-    { ::index create node_has_key:inverse { id } }
-    { :create node_managed_by_app { node_id: String => app: String } }
-    { :create node_refers_to { node_id: String => other_node_id: String } }
-    {
-      :create fqkey_to_dbkey {
-        key: String
-        =>
-        relation: String,
-        field_name: String,
-        type: String,
-        is_fts_enabled: Bool,
-      }
-    }
-    {
-      ?[key, relation, field_name, type, is_fts_enabled] <- [
-        ['panorama/journal/page/day', 'journal_day', 'day', 'string', false],
-        ['panorama/journal/page/title', 'journal', 'title', 'string', true],
-        ['panorama/journal/page/content', 'journal', 'content', 'string', true],
-        ['panorama/mail/config/imap_hostname', 'mail_config', 'imap_hostname', 'string', false],
-        ['panorama/mail/config/imap_port', 'mail_config', 'imap_port', 'int', false],
-        ['panorama/mail/config/imap_username', 'mail_config', 'imap_username', 'string', false],
-        ['panorama/mail/config/imap_password', 'mail_config', 'imap_password', 'string', false],
-        ['panorama/mail/message/body', 'message', 'body', 'string', true],
-        ['panorama/mail/message/subject', 'message', 'subject', 'string', true],
-        ['panorama/mail/message/message_id', 'message', 'message_id', 'string', false],
-      ]
-      :put fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled }
-    }
+// fn migration_01(db: &DbInstance) -> Result<()> {
+//   let result = db.run_script_str(
+//     "
+//     # Primary node type
+//     {
+//       :create node {
+//         id: String
+//         =>
+//         type: String,
+//         created_at: Float default now(),
+//         updated_at: Float default now(),
+//         extra_data: Json default {},
+//       }
+//     }

-    # Create journal type
-    { :create journal { node_id: String => title: String default '', content: String } }
-    { :create journal_day { day: String => node_id: String } }
+//     # Inverse mappings for easy querying
+//     { :create node_has_key { key: String => id: String } }
+//     { ::index create node_has_key:inverse { id } }
+//     { :create node_managed_by_app { node_id: String => app: String } }
+//     { :create node_refers_to { node_id: String => other_node_id: String } }
+//     {
+//       :create fqkey_to_dbkey {
+//         key: String
+//         =>
+//         relation: String,
+//         field_name: String,
+//         type: String,
+//         is_fts_enabled: Bool,
+//       }
+//     }
+//     {
+//       ?[key, relation, field_name, type, is_fts_enabled] <- [
+//         ['panorama/journal/page/day', 'journal_day', 'day', 'string', false],
+//         ['panorama/journal/page/title', 'journal', 'title', 'string', true],
+//         ['panorama/journal/page/content', 'journal', 'content', 'string', true],
+//         ['panorama/mail/config/imap_hostname', 'mail_config', 'imap_hostname', 'string', false],
+//         ['panorama/mail/config/imap_port', 'mail_config', 'imap_port', 'int', false],
+//         ['panorama/mail/config/imap_username', 'mail_config', 'imap_username', 'string', false],
+//         ['panorama/mail/config/imap_password', 'mail_config', 'imap_password', 'string', false],
+//         ['panorama/mail/message/body', 'message', 'body', 'string', true],
+//         ['panorama/mail/message/subject', 'message', 'subject', 'string', true],
+//         ['panorama/mail/message/message_id', 'message', 'message_id', 'string', false],
+//       ]
+//       :put fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled }
+//     }

-    # Mail
-    {
-      :create mail_config {
-        node_id: String
-        =>
-        imap_hostname: String,
-        imap_port: Int,
-        imap_username: String,
-        imap_password: String,
-      }
-    }
-    {
-      :create mailbox {
-        node_id: String
-        =>
-        account_node_id: String,
-        mailbox_name: String,
-      }
-    }
-    { ::index create mailbox:by_account_id_and_name { account_node_id, mailbox_name } }
-    {
-      :create message {
-        node_id: String
-        =>
-        message_id: String,
-        account_node_id: String,
-        mailbox_node_id: String,
-        subject: String,
-        headers: Json?,
-        body: Bytes,
-        internal_date: String,
-      }
-    }
-    { ::index create message:message_id { message_id } }
-    { ::index create message:date { internal_date } }
-    { ::index create message:by_mailbox_id { mailbox_node_id } }
+//     # Create journal type
+//     { :create journal { node_id: String => title: String default '', content: String } }
+//     { :create journal_day { day: String => node_id: String } }

-    # Calendar
-    ",
-    "",
-    false,
-  );
-  ensure_ok(&result)?;
+//     # Mail
+//     {
+//       :create mail_config {
+//         node_id: String
+//         =>
+//         imap_hostname: String,
+//         imap_port: Int,
+//         imap_username: String,
+//         imap_password: String,
+//       }
+//     }
+//     {
+//       :create mailbox {
+//         node_id: String
+//         =>
+//         account_node_id: String,
+//         mailbox_name: String,
+//         uid_validity: Int? default null,
+//         extra_data: Json default {},
+//       }
+//     }
+//     { ::index create mailbox:by_account_id_and_name { account_node_id, mailbox_name } }
+//     {
+//       :create message {
+//         node_id: String
+//         =>
+//         message_id: String,
+//         account_node_id: String,
+//         mailbox_node_id: String,
+//         subject: String,
+//         headers: Json?,
+//         body: Bytes,
+//         internal_date: String,
+//       }
+//     }
+//     { ::index create message:message_id { message_id } }
+//     { ::index create message:date { internal_date } }
+//     { ::index create message:by_mailbox_id { mailbox_node_id } }

-  Ok(())
-}
+//     # Calendar
+//     ",
+//     "",
+//     false,
+//   );
+//   ensure_ok(&result)?;

+//   Ok(())
+// }


@@ -0,0 +1,3 @@
+use crate::AppState;
+
+impl AppState {}


@@ -47,6 +47,10 @@ impl AppState {
     let mut all_relations = hmap! {};

     for relation_name in relation_names.iter() {
+      if relation_name.contains(":") {
+        continue;
+      }
+
       let mut relation_info = vec![];

       let columns = relation_columns.get(relation_name.as_str()).unwrap();


@@ -1,7 +1,7 @@
 use std::str::FromStr;

 use chrono::Local;
-use cozo::ScriptMutability;
+// use cozo::ScriptMutability;
 use miette::{IntoDiagnostic, Result};
 use uuid::Uuid;


@@ -1,21 +1,28 @@
-pub mod export;
-pub mod journal;
-pub mod mail;
-pub mod node;
-pub mod utils;
+// pub mod codetrack;
+// pub mod export;
+// pub mod journal;
+// pub mod mail;
+// pub mod node;
+// pub mod utils;

 use std::{collections::HashMap, fs, path::Path};

 use bimap::BiMap;
-use cozo::DbInstance;
-use miette::{IntoDiagnostic, Result};
+use miette::{Context, IntoDiagnostic, Result};
+use sqlx::{
+  sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
+  SqlitePool,
+};
 use tantivy::{
   directory::MmapDirectory,
   schema::{Field, Schema, STORED, STRING, TEXT},
   Index,
 };

-use crate::{mail::mail_loop, migrations::run_migrations};
+use crate::{
+  // mail::MailWorker,
+  migrations::{self, MIGRATOR},
+};

 pub fn tantivy_schema() -> (Schema, BiMap<String, Field>) {
   let mut schema_builder = Schema::builder();
@@ -33,7 +40,7 @@ pub fn tantivy_schema() -> (Schema, BiMap<String, Field>) {

 #[derive(Clone)]
 pub struct AppState {
-  pub db: DbInstance,
+  pub db: SqlitePool,
   pub tantivy_index: Index,
   pub tantivy_field_map: BiMap<String, Field>,
 }
@@ -41,6 +48,10 @@ pub struct AppState {
 impl AppState {
   pub async fn new(panorama_dir: impl AsRef<Path>) -> Result<Self> {
     let panorama_dir = panorama_dir.as_ref().to_path_buf();
+    fs::create_dir_all(&panorama_dir)
+      .into_diagnostic()
+      .context("Could not create panorama directory")?;
+
     println!("Panorama dir: {}", panorama_dir.display());

     let (tantivy_index, tantivy_field_map) = {
@@ -56,12 +67,14 @@ impl AppState {
     };

     let db_path = panorama_dir.join("db.sqlite");
-    let db = DbInstance::new(
-      "sqlite",
-      db_path.display().to_string(),
-      Default::default(),
-    )
-    .unwrap();
+    let sqlite_connect_options = SqliteConnectOptions::new()
+      .filename(db_path)
+      .journal_mode(SqliteJournalMode::Wal);
+    let db = SqlitePoolOptions::new()
+      .connect_with(sqlite_connect_options)
+      .await
+      .into_diagnostic()
+      .context("Could not connect to SQLite database")?;

     let state = AppState {
       db,
@@ -74,10 +87,16 @@ impl AppState {
   }

   async fn init(&self) -> Result<()> {
-    run_migrations(&self.db).await?;
+    // run_migrations(&self.db).await?;
+    MIGRATOR
+      .run(&self.db)
+      .await
+      .into_diagnostic()
+      .context("Could not migrate database")?;

-    let state = self.clone();
-    tokio::spawn(async move { mail_loop(state).await });
+    // let state = self.clone();
+    // let mail_worker = MailWorker::new(state);
+    // tokio::spawn(mail_worker.mail_loop());

     Ok(())
   }


@@ -4,7 +4,6 @@ use std::{
 };

 use chrono::{DateTime, Utc};
-use cozo::{DataValue, MultiTransaction, NamedRows};
 use itertools::Itertools;
 use miette::{bail, IntoDiagnostic, Result};
 use serde_json::Value;
@@ -18,7 +17,7 @@ use uuid::Uuid;

 use crate::{AppState, NodeId};

-use super::utils::{data_value_to_json_value, owned_value_to_json_value};
+use super::utils::owned_value_to_json_value;

 pub type ExtraData = BTreeMap<String, Value>;

@@ -377,8 +376,9 @@ impl AppState {
       .get_by_left("panorama/journal/page/content")
       .unwrap()
       .clone();
-    let query_parser =
+    let mut query_parser =
       QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
+    query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
     let query = query_parser.parse_query(query).into_diagnostic()?;

     let top_docs = searcher
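The one-line change above turns on fuzzy matching for the journal-content field: per tantivy's QueryParser API, set_field_fuzzy takes the field, whether to treat terms as prefixes, the maximum edit distance, and whether a transposition counts as a single edit. A self-contained sketch (not repo code) showing the effect:

use tantivy::collector::TopDocs;
use tantivy::query::QueryParser;
use tantivy::schema::{Schema, STORED, TEXT};
use tantivy::{doc, Index, IndexWriter};

fn main() -> tantivy::Result<()> {
  let mut schema_builder = Schema::builder();
  let content = schema_builder.add_text_field("content", TEXT | STORED);
  let index = Index::create_in_ram(schema_builder.build());

  let mut writer: IndexWriter = index.writer(15_000_000)?;
  writer.add_document(doc!(content => "panorama journal page"))?;
  writer.commit()?;

  let searcher = index.reader()?.searcher();
  let mut parser = QueryParser::for_index(&index, vec![content]);
  // Allow up to two edits, so the typo "journl" still matches "journal".
  parser.set_field_fuzzy(content, true, 2, true);

  let query = parser.parse_query("journl")?;
  let hits = searcher.search(&query, &TopDocs::with_limit(5))?;
  println!("{} hit(s)", hits.len());
  Ok(())
}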


@@ -1,4 +1,3 @@
-use cozo::{DataValue, Num};
 use itertools::Itertools;
 use serde_json::{Number, Value};
 use tantivy::schema::OwnedValue;
@@ -30,28 +29,31 @@ pub fn owned_value_to_json_value(data_value: &OwnedValue) -> Value {
   }
 }

-pub fn data_value_to_json_value(data_value: &DataValue) -> Value {
-  match data_value {
-    DataValue::Null => Value::Null,
-    DataValue::Bool(b) => Value::Bool(*b),
-    DataValue::Num(n) => Value::Number(match n {
-      Num::Int(i) => Number::from(*i),
-      Num::Float(f) => Number::from_f64(*f).unwrap(),
-    }),
-    DataValue::Str(s) => Value::String(s.to_string()),
-    DataValue::List(v) => {
-      Value::Array(v.into_iter().map(data_value_to_json_value).collect_vec())
-    }
-    DataValue::Json(v) => v.0.clone(),
-    _ => {
-      println!("Converting unknown {:?}", data_value);
-      serde_json::to_value(data_value).unwrap()
-    } // DataValue::Bytes(s) => todo!(),
-    // DataValue::Uuid(_) => todo!(),
-    // DataValue::Regex(_) => todo!(),
-    // DataValue::Set(_) => todo!(),
-    // DataValue::Vec(_) => todo!(),
-    // DataValue::Validity(_) => todo!(),
-    // DataValue::Bot => todo!(),
-  }
-}
+// pub fn data_value_to_json_value(data_value: &DataValue) -> Value {
+//   match data_value {
+//     DataValue::Null => Value::Null,
+//     DataValue::Bool(b) => Value::Bool(*b),
+//     DataValue::Num(n) => Value::Number(match n {
+//       Num::Int(i) => Number::from(*i),
+//       Num::Float(f) => Number::from_f64(*f).unwrap(),
+//     }),
+//     DataValue::Str(s) => Value::String(s.to_string()),
+//     DataValue::List(v) => {
+//       Value::Array(v.into_iter().map(data_value_to_json_value).collect_vec())
+//     }
+//     DataValue::Json(v) => v.0.clone(),
+//     DataValue::Bytes(s) => {
+//       Value::String(String::from_utf8_lossy(s).to_string())
+//     }
+//     _ => {
+//       println!("Converting unknown {:?}", data_value);
+//       serde_json::to_value(data_value).unwrap()
+//     } // DataValue::Bytes(s) => todo!(),
+//     // DataValue::Uuid(_) => todo!(),
+//     // DataValue::Regex(_) => todo!(),
+//     // DataValue::Set(_) => todo!(),
+//     // DataValue::Vec(_) => todo!(),
+//     // DataValue::Validity(_) => todo!(),
+//     // DataValue::Bot => todo!(),
+//   }
+// }


@@ -9,7 +9,7 @@ edition = "2021"
 anyhow = "1.0.86"
 axum = "0.7.5"
 chrono = { version = "0.4.38", features = ["serde"] }
-cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
+# cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
 csv = "1.3.0"
 dirs = "5.0.1"
 futures = "0.3.30"


@@ -1,25 +0,0 @@
-use std::{
-  fs::{self, File},
-  path::PathBuf,
-};
-
-use axum::{extract::State, Json};
-use miette::IntoDiagnostic;
-use serde_json::Value;
-
-use crate::{error::AppResult, AppState};
-
-// This code is really bad but gives me a quick way to look at all of the data
-// in the data at once. Rip this out once there's any Real Security Mechanism.
-
-pub async fn export(State(state): State<AppState>) -> AppResult<Json<Value>> {
-  let export = state.export().await?;
-
-  let base_dir = PathBuf::from("export");
-  fs::create_dir_all(&base_dir).into_diagnostic()?;
-
-  let file = File::create(base_dir.join("export.json")).into_diagnostic()?;
-  serde_json::to_writer_pretty(file, &export).into_diagnostic()?;
-
-  Ok(Json(export))
-}


@@ -1,6 +1,5 @@
 use axum::{extract::State, routing::get, Json, Router};
 use chrono::Local;
-use cozo::ScriptMutability;
 use serde_json::Value;
 use utoipa::OpenApi;
 use uuid::Uuid;
@@ -9,26 +8,27 @@ use crate::{error::AppResult, AppState};

 /// Node API
 #[derive(OpenApi)]
-#[openapi(paths(get_todays_journal_id), components(schemas()))]
+#[openapi(paths(), components(schemas()))]
 pub(super) struct JournalApi;

 pub(super) fn router() -> Router<AppState> {
-  Router::new().route("/get_todays_journal_id", get(get_todays_journal_id))
+  Router::new()
+  // .route("/get_todays_journal_id", get(get_todays_journal_id))
 }

-#[utoipa::path(
-  get,
-  path = "/get_todays_journal_id",
-  responses(
-    (status = 200),
-  ),
-)]
-pub async fn get_todays_journal_id(
-  State(state): State<AppState>,
-) -> AppResult<Json<Value>> {
-  let node_id = state.get_todays_journal_id().await?;
+// #[utoipa::path(
+//   get,
+//   path = "/get_todays_journal_id",
+//   responses(
+//     (status = 200),
+//   ),
+// )]
+// pub async fn get_todays_journal_id(
+//   State(state): State<AppState>,
+// ) -> AppResult<Json<Value>> {
+//   let node_id = state.get_todays_journal_id().await?;

-  Ok(Json(json!({
-    "node_id": node_id.to_string(),
-  })))
-}
+//   Ok(Json(json!({
+//     "node_id": node_id.to_string(),
+//   })))
+// }


@@ -8,7 +8,6 @@ extern crate serde_json;
 extern crate sugars;

 mod error;
-mod export;
 mod journal;
 pub mod mail;
 mod node;
@@ -27,12 +26,6 @@ use tower_http::{
 use utoipa::OpenApi;
 use utoipa_scalar::{Scalar, Servable};

-use crate::{
-  export::export,
-  mail::{get_mail, get_mail_config},
-  node::search_nodes,
-};
-
 pub async fn run() -> Result<()> {
   #[derive(OpenApi)]
   #[openapi(
@@ -61,11 +54,10 @@ pub async fn run() -> Result<()> {
   let app = Router::new()
     .merge(Scalar::with_url("/api/docs", ApiDoc::openapi()))
     .route("/", get(|| async { "Hello, World!" }))
-    .route("/export", get(export))
     .nest("/node", node::router().with_state(state.clone()))
     .nest("/journal", journal::router().with_state(state.clone()))
-    .route("/mail/config", get(get_mail_config))
-    .route("/mail", get(get_mail))
+    // .route("/mail/config", get(get_mail_config))
+    // .route("/mail", get(get_mail))
     .layer(ServiceBuilder::new().layer(cors_layer))
     .layer(ServiceBuilder::new().layer(trace_layer))
     .with_state(state.clone());


@@ -1,54 +1,53 @@
 use axum::{extract::State, Json};
-use cozo::ScriptMutability;
 use panorama_core::AppState;
 use serde_json::Value;

 use crate::error::AppResult;

-pub async fn get_mail_config(
-  State(state): State<AppState>,
-) -> AppResult<Json<Value>> {
-  let configs = state.fetch_mail_configs()?;
-  Ok(Json(json!({ "configs": configs })))
-}
+// pub async fn get_mail_config(
+//   State(state): State<AppState>,
+// ) -> AppResult<Json<Value>> {
+//   let configs = state.fetch_mail_configs()?;
+//   Ok(Json(json!({ "configs": configs })))
+// }

-pub async fn get_mail(State(state): State<AppState>) -> AppResult<Json<Value>> {
-  let mailboxes = state.db.run_script("
-    ?[node_id, account_node_id, mailbox_name] := *mailbox {node_id, account_node_id, mailbox_name}
-  ", Default::default(), ScriptMutability::Immutable)?;
+// pub async fn get_mail(State(state): State<AppState>) -> AppResult<Json<Value>> {
+//   let mailboxes = state.db.run_script("
+//     ?[node_id, account_node_id, mailbox_name] := *mailbox {node_id, account_node_id, mailbox_name}
+//   ", Default::default(), ScriptMutability::Immutable)?;

-  let mailboxes = mailboxes
-    .rows
-    .iter()
-    .map(|mb| {
-      json!({
-        "node_id": mb[0].get_str().unwrap(),
-        "account_node_id": mb[1].get_str().unwrap(),
-        "mailbox_name": mb[2].get_str().unwrap(),
-      })
-    })
-    .collect::<Vec<_>>();
+//   let mailboxes = mailboxes
+//     .rows
+//     .iter()
+//     .map(|mb| {
+//       json!({
+//         "node_id": mb[0].get_str().unwrap(),
+//         "account_node_id": mb[1].get_str().unwrap(),
+//         "mailbox_name": mb[2].get_str().unwrap(),
+//       })
+//     })
+//     .collect::<Vec<_>>();

-  let messages = state.db.run_script("
-    ?[node_id, subject, body, internal_date] := *message {node_id, subject, body, internal_date}
-    :limit 10
-  ", Default::default(), ScriptMutability::Immutable)?;
+//   let messages = state.db.run_script("
+//     ?[node_id, subject, body, internal_date] := *message {node_id, subject, body, internal_date}
+//     :limit 10
+//   ", Default::default(), ScriptMutability::Immutable)?;

-  let messages = messages
-    .rows
-    .iter()
-    .map(|m| {
-      json!({
-        "node_id": m[0].get_str().unwrap(),
-        "subject": m[1].get_str().unwrap(),
-        "body": m[2].get_str(),
-        "internal_date": m[3].get_str().unwrap(),
-      })
-    })
-    .collect::<Vec<_>>();
+//   let messages = messages
+//     .rows
+//     .iter()
+//     .map(|m| {
+//       json!({
+//         "node_id": m[0].get_str().unwrap(),
+//         "subject": m[1].get_str().unwrap(),
+//         "body": m[2].get_str(),
+//         "internal_date": m[3].get_str().unwrap(),
+//       })
+//     })
+//     .collect::<Vec<_>>();

-  Ok(Json(json!({
-    "mailboxes": mailboxes,
-    "messages": messages,
-  })))
-}
+//   Ok(Json(json!({
+//     "mailboxes": mailboxes,
+//     "messages": messages,
+//   })))
+// }


@@ -10,11 +10,10 @@ use axum::{
   Json, Router,
 };
 use chrono::{DateTime, Utc};
-use cozo::{DataValue, MultiTransaction};
 use itertools::Itertools;
 use miette::IntoDiagnostic;
 use panorama_core::{
-  state::node::{CreateOrUpdate, ExtraData},
+  // state::node::{CreateOrUpdate, ExtraData},
   NodeId,
 };
 use serde_json::Value;
@@ -25,172 +24,169 @@ use crate::{error::AppResult, AppState};

 /// Node API
 #[derive(OpenApi)]
-#[openapi(
-  paths(get_node, update_node, create_node),
-  components(schemas(GetNodeResult))
-)]
+#[openapi(paths(), components(schemas()))]
 pub(super) struct NodeApi;

 pub(super) fn router() -> Router<AppState> {
   Router::new()
-    .route("/", put(create_node))
-    .route("/:id", get(get_node))
-    .route("/:id", post(update_node))
-    .route("/search", get(search_nodes))
+  // .route("/", put(create_node))
+  // .route("/:id", get(get_node))
+  // .route("/:id", post(update_node))
+  // .route("/search", get(search_nodes))
 }

-#[derive(Serialize, Deserialize, ToSchema, Clone)]
-struct GetNodeResult {
-  node_id: String,
-  fields: HashMap<String, Value>,
-  created_at: DateTime<Utc>,
-  updated_at: DateTime<Utc>,
-}
+// #[derive(Serialize, Deserialize, ToSchema, Clone)]
+// struct GetNodeResult {
+//   node_id: String,
+//   fields: HashMap<String, Value>,
+//   created_at: DateTime<Utc>,
+//   updated_at: DateTime<Utc>,
+// }

-/// Get node info
-///
-/// This endpoint retrieves all the fields for a particular node
-#[utoipa::path(
-  get,
-  path = "/{id}",
-  responses(
-    (status = 200, body = [GetNodeResult]),
-    (status = 404, description = "the node ID provided was not found")
-  ),
-  params(
-    ("id" = String, Path, description = "Node ID"),
-  ),
-)]
-pub async fn get_node(
-  State(state): State<AppState>,
-  Path(node_id): Path<String>,
-) -> AppResult<(StatusCode, Json<Value>)> {
-  let node_info = state.get_node(&node_id).await?;
+// /// Get node info
+// ///
+// /// This endpoint retrieves all the fields for a particular node
+// #[utoipa::path(
+//   get,
+//   path = "/{id}",
+//   responses(
+//     (status = 200, body = [GetNodeResult]),
+//     (status = 404, description = "the node ID provided was not found")
+//   ),
+//   params(
+//     ("id" = String, Path, description = "Node ID"),
+//   ),
+// )]
+// pub async fn get_node(
+//   State(state): State<AppState>,
+//   Path(node_id): Path<String>,
+// ) -> AppResult<(StatusCode, Json<Value>)> {
+//   let node_info = state.get_node(&node_id).await?;

-  Ok((
-    StatusCode::OK,
-    Json(json!({
-      "node_id": node_id,
-      "fields": node_info.fields,
-      "created_at": node_info.created_at,
-      "updated_at": node_info.updated_at,
-    })),
-  ))
-}
+//   Ok((
+//     StatusCode::OK,
+//     Json(json!({
+//       "node_id": node_id,
+//       "fields": node_info.fields,
+//       "created_at": node_info.created_at,
+//       "updated_at": node_info.updated_at,
+//     })),
+//   ))
+// }

-#[derive(Deserialize, Debug)]
-pub struct UpdateData {
-  extra_data: Option<ExtraData>,
-}
+// #[derive(Deserialize, Debug)]
+// pub struct UpdateData {
+//   extra_data: Option<ExtraData>,
+// }

-/// Update node info
-#[utoipa::path(
-  post,
-  path = "/{id}",
-  responses(
-    (status = 200)
-  ),
-  params(
-    ("id" = String, Path, description = "Node ID"),
-  )
-)]
-pub async fn update_node(
-  State(state): State<AppState>,
-  Path(node_id): Path<String>,
-  Json(opts): Json<UpdateData>,
-) -> AppResult<Json<Value>> {
-  let node_id = NodeId(Uuid::from_str(&node_id).into_diagnostic()?);
-  let node_info = state
-    .create_or_update_node(CreateOrUpdate::Update { node_id }, opts.extra_data)
-    .await?;
+// /// Update node info
+// #[utoipa::path(
+//   post,
+//   path = "/{id}",
+//   responses(
+//     (status = 200)
+//   ),
+//   params(
+//     ("id" = String, Path, description = "Node ID"),
+//   )
+// )]
+// pub async fn update_node(
+//   State(state): State<AppState>,
+//   Path(node_id): Path<String>,
+//   Json(opts): Json<UpdateData>,
+// ) -> AppResult<Json<Value>> {
+//   let node_id = NodeId(Uuid::from_str(&node_id).into_diagnostic()?);
+//   let node_info = state
+//     .create_or_update_node(CreateOrUpdate::Update { node_id }, opts.extra_data)
+//     .await?;

-  Ok(Json(json!({
-    "node_id": node_info.node_id.to_string(),
-  })))
-}
+//   Ok(Json(json!({
+//     "node_id": node_info.node_id.to_string(),
+//   })))
+// }

-#[derive(Debug, Deserialize)]
-pub struct CreateNodeOpts {
-  // TODO: Allow submitting a string
-  // id: Option<String>,
-  #[serde(rename = "type")]
-  ty: String,
-  extra_data: Option<ExtraData>,
-}
+// #[derive(Debug, Deserialize)]
+// pub struct CreateNodeOpts {
+//   // TODO: Allow submitting a string
+//   // id: Option<String>,
+//   #[serde(rename = "type")]
+//   ty: String,
+//   extra_data: Option<ExtraData>,
+// }

-#[utoipa::path(
-  put,
-  path = "/",
-  responses((status = 200)),
-)]
-pub async fn create_node(
-  State(state): State<AppState>,
-  Json(opts): Json<CreateNodeOpts>,
-) -> AppResult<Json<Value>> {
-  let node_info = state
-    .create_or_update_node(
-      CreateOrUpdate::Create { r#type: opts.ty },
-      opts.extra_data,
-    )
-    .await?;
+// #[utoipa::path(
+//   put,
+//   path = "/",
+//   responses((status = 200)),
+// )]
+// pub async fn create_node(
+//   State(state): State<AppState>,
+//   Json(opts): Json<CreateNodeOpts>,
+// ) -> AppResult<Json<Value>> {
+//   let node_info = state
+//     .create_or_update_node(
+//       CreateOrUpdate::Create { r#type: opts.ty },
+//       opts.extra_data,
+//     )
+//     .await?;

-  Ok(Json(json!({
-    "node_id": node_info.node_id.to_string(),
-  })))
-}
+//   Ok(Json(json!({
+//     "node_id": node_info.node_id.to_string(),
+//   })))
+// }

-#[derive(Deserialize)]
-pub struct SearchQuery {
-  query: String,
-}
+// #[derive(Deserialize)]
+// pub struct SearchQuery {
+//   query: String,
+// }

-#[utoipa::path(
-  get,
-  path = "/search",
-  responses((status = 200)),
-)]
-pub async fn search_nodes(
-  State(state): State<AppState>,
-  Query(query): Query<SearchQuery>,
-) -> AppResult<Json<Value>> {
-  let search_result = state.search_nodes(query.query).await?;
-  let search_result = search_result
-    .into_iter()
-    .map(|(id, value)| value["fields"].clone())
-    .collect_vec();
+// #[utoipa::path(
+//   get,
+//   path = "/search",
+//   responses((status = 200)),
+// )]
+// pub async fn search_nodes(
+//   State(state): State<AppState>,
+//   Query(query): Query<SearchQuery>,
+// ) -> AppResult<Json<Value>> {
+//   let search_result = state.search_nodes(query.query).await?;
+//   let search_result = search_result
+//     .into_iter()
+//     .map(|(id, value)| value["fields"].clone())
+//     .collect_vec();

-  Ok(Json(json!({
-    "results": search_result,
-  })))
-}
+//   Ok(Json(json!({
+//     "results": search_result,
+//   })))
+// }

-fn get_rows_for_extra_keys(
-  tx: &MultiTransaction,
-  extra_data: &ExtraData,
-) -> AppResult<HashMap<String, (String, String, String)>> {
-  let result = tx.run_script(
-    "
-    ?[key, relation, field_name, type] :=
-      *fqkey_to_dbkey{key, relation, field_name, type},
-      is_in(key, $keys)
-    ",
-    btmap! {
-      "keys".to_owned() => DataValue::List(
-        extra_data
-          .keys()
-          .map(|s| DataValue::from(s.as_str()))
-          .collect::<Vec<_>>()
-      ),
-    },
-  )?;
+// fn get_rows_for_extra_keys(
+//   tx: &MultiTransaction,
+//   extra_data: &ExtraData,
+// ) -> AppResult<HashMap<String, (String, String, String)>> {
+//   let result = tx.run_script(
+//     "
+//     ?[key, relation, field_name, type] :=
+//       *fqkey_to_dbkey{key, relation, field_name, type},
+//       is_in(key, $keys)
+//     ",
+//     btmap! {
+//       "keys".to_owned() => DataValue::List(
+//         extra_data
+//           .keys()
+//           .map(|s| DataValue::from(s.as_str()))
+//           .collect::<Vec<_>>()
+//       ),
+//     },
+//   )?;

-  let s = |s: &DataValue| s.get_str().unwrap().to_owned();
+//   let s = |s: &DataValue| s.get_str().unwrap().to_owned();

-  Ok(
-    result
-      .rows
-      .into_iter()
-      .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
-      .collect::<HashMap<_, _>>(),
-  )
-}
+//   Ok(
+//     result
+//       .rows
+//       .into_iter()
+//       .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
+//       .collect::<HashMap<_, _>>(),
+//   )
+// }


@@ -1,5 +1,5 @@
 {
-  "name": "panorama-app",
+  "name": "panorama",
   "version": "0.1.0",
   "type": "module",
   "scripts": {

[18 binary image files changed; each shows identical dimensions and size before and after (ranging from 965 B to 50 KiB), consistent with the assets being moved rather than edited.]

@@ -1,20 +1,18 @@
 import styles from "./SearchBar.module.scss";
 import {
-  FloatingFocusManager,
   FloatingOverlay,
   FloatingPortal,
   autoUpdate,
   offset,
-  useClick,
   useDismiss,
   useFloating,
   useFocus,
   useInteractions,
 } from "@floating-ui/react";
-import { useDebounce } from "use-debounce";
-import { useEffect, useState } from "react";
+import { useCallback, useEffect, useState } from "react";
 import { atom, useAtom, useSetAtom } from "jotai";
 import { useNodeControls } from "../App";
+import { useDebounce, useDebouncedCallback } from "use-debounce";

 const searchQueryAtom = atom("");
 const showMenuAtom = atom(false);
@@ -37,8 +35,7 @@ export default function SearchBar() {
     useDismiss(context),
   ]);

-  useEffect(() => {
-    setSearchResults([]);
+  const performSearch = useCallback(() => {
     const trimmed = searchQuery.trim();
     if (trimmed === "") return;

@@ -63,7 +60,11 @@ export default function SearchBar() {
         onFocus={() => setShowMenu(true)}
         ref={refs.setReference}
         value={searchQuery}
-        onChange={(evt) => setSearchQuery(evt.target.value)}
+        onChange={(evt) => {
+          setSearchQuery(evt.target.value);
+          if (evt.target.value) performSearch();
+          else setSearchResults([]);
+        }}
         {...getReferenceProps()}
       />
     </div>


@@ -7,18 +7,15 @@ import remarkMath from "remark-math";
 import rehypeKatex from "rehype-katex";
 import { parse as parseDate, format as formatDate } from "date-fns";
 import { useDebounce } from "use-debounce";
+import {
+  JOURNAL_PAGE_CONTENT_FIELD_NAME,
+  JOURNAL_PAGE_TITLE_FIELD_NAME,
+  NodeInfo,
+} from "../../lib/data";

-const JOURNAL_PAGE_CONTENT_FIELD_NAME = "panorama/journal/page/content";
-const JOURNAL_PAGE_TITLE_FIELD_NAME = "panorama/journal/page/title";

 export interface JournalPageProps {
   id: string;
-  data: {
-    day?: string;
-    title?: string;
-    content: string;
-    fields: object;
-  };
+  data: NodeInfo;
 }

 export default function JournalPage({ id, data }: JournalPageProps) {
ui/src/lib/data.ts (new file, 11 lines)

@@ -0,0 +1,11 @@
+export interface NodeInfo {
+  fields: Partial<NodeFields>;
+}
+
+export const JOURNAL_PAGE_CONTENT_FIELD_NAME = "panorama/journal/page/content";
+export const JOURNAL_PAGE_TITLE_FIELD_NAME = "panorama/journal/page/title";
+
+export interface NodeFields {
+  [JOURNAL_PAGE_CONTENT_FIELD_NAME]: string;
+  [JOURNAL_PAGE_TITLE_FIELD_NAME]: string;
+}