create node
parent 5c2a35935e
commit 07965faf1b
8 changed files with 156 additions and 25 deletions
@@ -11,10 +11,22 @@ pub mod state;
 #[cfg(test)]
 mod tests;
 
+use std::fmt;
+
 pub use crate::state::AppState;
 
 use miette::{bail, IntoDiagnostic, Result};
 use serde_json::Value;
+use uuid::Uuid;
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct NodeId(Uuid);
+
+impl fmt::Display for NodeId {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    write!(f, "{}", self.0.to_string())
+  }
+}
 
 pub fn ensure_ok(s: &str) -> Result<()> {
   let status: Value = serde_json::from_str(&s).into_diagnostic()?;
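The newtype above only implements `Display`; later hunks in this commit rebuild a `NodeId` from its string form with `Uuid::from_str`. A minimal round-trip sketch of that pattern (illustrative only, assuming `uuid` with the `v7` feature as used elsewhere in the diff):

```rust
use std::str::FromStr;

use uuid::Uuid;

// Hypothetical helper, not part of the commit: create an id, print it via
// the Display impl above, and parse it back the way mail.rs and node.rs do.
fn node_id_round_trip() {
  let id = NodeId(Uuid::now_v7());
  let text = id.to_string();
  let parsed = NodeId(Uuid::from_str(&text).unwrap());
  assert_eq!(text, parsed.to_string());
}
```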
crates/panorama-core/src/state/journal.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+use miette::Result;
+
+use crate::{AppState, NodeId};
+
+impl AppState {
+  pub fn get_todays_journal_id(&self) -> Result<NodeId> {
+    todo!()
+  }
+}
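The journal lookup is only stubbed with `todo!()` for now; a hypothetical caller, once it is implemented, might combine it with the reworked `get_node` further down (sketch only, not part of the commit):

```rust
// Hypothetical usage: resolve today's journal node and fetch its fields.
async fn open_todays_journal(state: &AppState) -> miette::Result<NodeInfo> {
  let journal_id: NodeId = state.get_todays_journal_id()?;
  state.get_node(&journal_id).await
}
```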
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, time::Duration};
+use std::{collections::HashMap, str::FromStr, time::Duration};
 
 use cozo::{DataValue, JsonData, ScriptMutability};
 use futures::TryStreamExt;
@@ -6,11 +6,11 @@ use miette::{IntoDiagnostic, Result};
 use tokio::{net::TcpStream, time::sleep};
 use uuid::Uuid;
 
-use crate::AppState;
+use crate::{AppState, NodeId};
 
 #[derive(Debug, Serialize)]
 pub struct MailConfig {
-  node_id: String,
+  node_id: NodeId,
   imap_hostname: String,
   imap_port: u16,
   imap_username: String,
@@ -34,7 +34,7 @@ impl AppState {
       .rows
       .into_iter()
       .map(|row| MailConfig {
-        node_id: row[0].get_str().unwrap().to_owned(),
+        node_id: NodeId(Uuid::from_str(row[0].get_str().unwrap()).unwrap()),
         imap_hostname: row[1].get_str().unwrap().to_owned(),
         imap_port: row[2].get_int().unwrap() as u16,
         imap_username: row[3].get_str().unwrap().to_owned(),
@@ -105,7 +105,7 @@ impl AppState {
        *mailbox{node_id, account_node_id, mailbox_name},
        account_node_id = $account_node_id,
        mailbox_name = 'INBOX'
-      ", btmap! {"account_node_id".to_owned()=>DataValue::from(config.node_id.to_owned())}, ScriptMutability::Immutable)?;
+      ", btmap! {"account_node_id".to_owned()=>DataValue::from(config.node_id.to_string())}, ScriptMutability::Immutable)?;
 
     if result.rows.len() == 0 {
       let new_node_id = Uuid::now_v7();
@@ -117,7 +117,7 @@ impl AppState {
         ",
         btmap! {
           "new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
-          "account_node_id".to_owned() => DataValue::from(config.node_id.to_owned()),
+          "account_node_id".to_owned() => DataValue::from(config.node_id.to_string()),
         },
         ScriptMutability::Mutable)?;
       new_node_id
@@ -165,7 +165,7 @@ impl AppState {
        .collect::<HashMap<_, _>>();
      DataValue::List(vec![
        DataValue::from(message_id.to_string()),
-        DataValue::from(config.node_id.clone()),
+        DataValue::from(config.node_id.to_string()),
        DataValue::from(inbox_node_id.clone()),
        DataValue::from(
          headers
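The `MailConfig` mapping above now parses stored ids with `NodeId(Uuid::from_str(...).unwrap())`. A `FromStr` impl on `NodeId` would keep that conversion in one place; the commit does not include one, so the following is purely an illustrative sketch:

```rust
use std::str::FromStr;

use uuid::Uuid;

// Illustrative only: not part of this commit.
impl FromStr for NodeId {
  type Err = uuid::Error;

  fn from_str(s: &str) -> Result<Self, Self::Err> {
    Ok(NodeId(Uuid::from_str(s)?))
  }
}

// With this impl, the row mapping could read:
//   node_id: row[0].get_str().unwrap().parse().unwrap(),
```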
@@ -1,4 +1,5 @@
 pub mod export;
+pub mod journal;
 pub mod mail;
 pub mod node;
 
@@ -1,19 +1,22 @@
-use std::collections::{BTreeMap, HashMap};
+use std::{
+  collections::{BTreeMap, HashMap},
+  str::FromStr,
+};
 
 use chrono::{DateTime, Utc};
-use cozo::{DataValue, MultiTransaction, ScriptMutability};
+use cozo::{DataValue, MultiTransaction, NamedRows, ScriptMutability};
 use itertools::Itertools;
 use miette::Result;
 use serde_json::Value;
 use uuid::Uuid;
 
-use crate::AppState;
+use crate::{AppState, NodeId};
 
 pub type ExtraData = BTreeMap<String, Value>;
 
 #[derive(Debug)]
 pub struct NodeInfo {
-  pub node_id: String,
+  pub node_id: NodeId,
   pub created_at: DateTime<Utc>,
   pub updated_at: DateTime<Utc>,
   pub fields: Option<HashMap<String, DataValue>>,
@@ -30,23 +33,119 @@ pub type FieldMapping = HashMap<String, FieldInfo>;
 
 impl AppState {
   /// Get all properties of a node
-  pub async fn get_node(&self, node_id: impl AsRef<str>) -> Result<NodeInfo> {
-    let node_id = node_id.as_ref().to_owned();
+  pub async fn get_node(&self, node_id: &NodeId) -> Result<NodeInfo> {
+    let tx = self.db.multi_transaction(false);
 
-    let result = self.db.run_script(
+    let result = tx.run_script(
       "
-      ?[relation, field_name, type, is_fts_enabled] :=
+      ?[key, relation, field_name, type, is_fts_enabled] :=
        *node_has_key { key, id },
        *fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled },
        id = $node_id
      ",
-      btmap! {"node_id".to_owned() => node_id.clone().into()},
-      ScriptMutability::Immutable,
+      btmap! {"node_id".to_owned() => node_id.to_string().into()},
     )?;
 
     println!("FIELDS: {:?}", result);
 
-    todo!()
+    let field_mapping = AppState::rows_to_field_mapping(result)?;
+
+    // Group the keys by which relation they're in
+    let result_by_relation = field_mapping
+      .iter()
+      .into_group_map_by(|(_, FieldInfo { relation_name, .. })| relation_name);
+
+    let mut all_relation_queries = vec![];
+    let mut all_relation_constraints = vec![];
+    let mut all_fields = vec![];
+    let mut field_counter = 0;
+    for (i, (relation, fields)) in result_by_relation.iter().enumerate() {
+      let constraint_name = format!("c{i}");
+
+      let mut keys = vec![];
+      let mut constraints = vec![];
+      for (key, field_info) in fields.iter() {
+        let counted_field_name = format!("f{field_counter}");
+        field_counter += 1;
+
+        keys.push(counted_field_name.clone());
+        constraints.push(format!(
+          "{}: {}",
+          field_info.relation_field.to_owned(),
+          counted_field_name,
+        ));
+        all_fields.push((
+          counted_field_name,
+          field_info.relation_field.to_owned(),
+          key,
+        ))
+      }
+
+      let keys = keys.join(", ");
+      let constraints = constraints.join(", ");
+      all_relation_queries.push(format!(
+        "
+        {constraint_name}[{keys}] :=
+          *{relation}{{ node_id, {constraints} }},
+          node_id = $node_id
+        "
+      ));
+      all_relation_constraints.push(format!("{constraint_name}[{keys}],"))
+    }
+
+    let all_relation_constraints = all_relation_constraints.join("\n");
+    let all_relation_queries = all_relation_queries.join("\n\n");
+    let all_field_names = all_fields
+      .iter()
+      .map(|(field_name, _, _)| field_name)
+      .join(", ");
+    let query = format!(
+      "
+      {all_relation_queries}
+
+      ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
+        *node {{ id, type, created_at, updated_at, extra_data }},
+        {all_relation_constraints}
+        id = $node_id
+      "
+    );
+    println!("QUERY: {query}");
+
+    let result = tx.run_script(
+      &query,
+      btmap! { "node_id".to_owned() => node_id.to_string().into(), },
+    )?;
+
+    println!("RESULT: {result:?}");
+
+    let created_at = DateTime::from_timestamp_millis(
+      (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
+    )
+    .unwrap();
+    let updated_at = DateTime::from_timestamp_millis(
+      (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+    )
+    .unwrap();
+
+    let mut fields = HashMap::new();
+
+    for row in result
+      .rows
+      .into_iter()
+      .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
+    {
+      for (value, (_, _, field_name)) in row {
+        fields.insert(field_name.to_string(), value);
+      }
+    }
+    println!("FIELDS: {:?}", fields);
+
+    Ok(NodeInfo {
+      node_id: node_id.clone(),
+      created_at,
+      updated_at,
+      fields: Some(fields),
+    })
   }
 
   pub async fn create_node(
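For reference, the two `format!` calls in the new `get_node` splice per-relation rules into a single CozoScript program. With one hypothetical relation `journal_page` exposing a single mapped field `content` (names invented for illustration), the generated `query` would look roughly like:

```
c0[f0] :=
  *journal_page{ node_id, content: f0 },
  node_id = $node_id

?[type, extra_data, created_at, updated_at, f0] :=
  *node { id, type, created_at, updated_at, extra_data },
  c0[f0],
  id = $node_id
```

The result rows are then read positionally, which is why the field values are collected starting at column index 4 (`skip(4)`).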
@@ -81,7 +180,7 @@ impl AppState {
 
     // Group the keys by which relation they're in
     let result_by_relation = field_mapping.iter().into_group_map_by(
-      |(key, FieldInfo { relation_name, .. })| relation_name,
+      |(_, FieldInfo { relation_name, .. })| relation_name,
     );
 
     for (relation, fields) in result_by_relation.iter() {
@@ -166,14 +265,14 @@ impl AppState {
     .unwrap();
 
     Ok(NodeInfo {
-      node_id,
+      node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
       created_at,
       updated_at,
       fields: None,
     })
   }
 
-  pub fn get_rows_for_extra_keys(
+  fn get_rows_for_extra_keys(
     &self,
     tx: &MultiTransaction,
     keys: &[String],
@@ -193,6 +292,10 @@ impl AppState {
       },
     )?;
 
+    AppState::rows_to_field_mapping(result)
+  }
+
+  fn rows_to_field_mapping(result: NamedRows) -> Result<FieldMapping> {
     let s = |s: &DataValue| s.get_str().unwrap().to_owned();
 
     Ok(
@@ -200,6 +303,7 @@ impl AppState {
         .rows
         .into_iter()
         .map(|row| {
+          println!("ROW {:?}", row);
           (
             s(&row[0]),
             FieldInfo {
@@ -1,3 +1,5 @@
+use core::panic;
+
 use cozo::DbInstance;
 use miette::Result;
 use tantivy::Index;
@@ -33,9 +35,15 @@ pub async fn test_create_node() -> Result<()> {
     serde_json::to_string_pretty(&state.export().await.unwrap()).unwrap()
   );
 
-  let node = state.get_node(node_info.node_id).await?;
+  let mut node = state.get_node(&node_info.node_id).await?;
 
   println!("node: {:?}", node);
 
+  assert!(node.fields.is_some());
+
+  let fields = node.fields.take().unwrap();
+
+  assert!(fields.contains_key("panorama/journal/page/content"));
+
   Ok(())
 }
@@ -12,7 +12,6 @@ mod export;
 mod journal;
 pub mod mail;
 mod node;
-mod query_builder;
 
 use std::fs;
 
@@ -1,2 +0,0 @@
-#[derive(Default)]
-pub struct QueryBuilder {}