Michael Zhang 2024-06-18 15:20:42 -05:00
parent 21728e6de5
commit f771a7d20f
4 changed files with 377 additions and 343 deletions


@@ -25,13 +25,13 @@ CREATE TABLE app (
   app_license TEXT
 );
 
-CREATE TABLE app_table (
+CREATE TABLE app_table_mapping (
   app_id INTEGER NOT NULL,
   app_table_name TEXT NOT NULL,
   db_table_name TEXT NOT NULL
 );
 
-CREATE TABLE full_key_to_db_key (
+CREATE TABLE key_mapping (
   full_key TEXT NOT NULL,
   app_id INTEGER NOT NULL,
   app_table_name TEXT NOT NULL,


@@ -4,6 +4,7 @@ pub mod apps;
 // pub mod journal;
 // pub mod mail;
 pub mod node;
+pub mod node_raw;
 // pub mod utils;
 
 use std::{collections::HashMap, fs, path::Path};


@@ -1,17 +1,17 @@
-use std::{
-  collections::{BTreeMap, HashMap},
-  str::FromStr,
-};
+use std::collections::{BTreeMap, HashMap};
 
 use chrono::{DateTime, Utc};
 use itertools::Itertools;
-use miette::{bail, IntoDiagnostic, Result};
+use miette::{bail, Context, Error, IntoDiagnostic, Report, Result};
 use serde_json::Value;
-use sqlx::{Acquire, Connection, FromRow};
+use sqlx::{
+  query::Query, sqlite::SqliteArguments, Acquire, Connection, Executor,
+  FromRow, QueryBuilder, Sqlite,
+};
 use tantivy::schema::{OwnedValue, Value as _};
 use uuid::Uuid;
 
-use crate::{AppState, NodeId};
+use crate::{state::node_raw::FieldMappingRow, AppState, NodeId};
 
 // use super::utils::owned_value_to_json_value;
@@ -25,177 +25,167 @@ pub struct NodeInfo {
   pub fields: Option<HashMap<String, Value>>,
 }
 
-#[derive(Debug)]
-pub struct FieldInfo {
-  pub relation_name: String,
-  pub relation_field: String,
-  pub r#type: String,
-  pub is_fts_enabled: bool,
-}
-
-pub type FieldMapping = HashMap<String, FieldInfo>;
-
 impl AppState {
   /// Get all properties of a node
   pub async fn get_node(&self, node_id: impl AsRef<str>) -> Result<NodeInfo> {
     let node_id = node_id.as_ref().to_owned();
-    let conn = self.conn().await?;
-
-    #[derive(FromRow)]
-    struct FieldMappingRow {
-      full_key: String,
-      app_id: i64,
-      app_table_name: String,
-      app_table_field: String,
-    }
+    let mut conn = self.conn().await?;
 
     conn
-      .transaction(|tx| {
+      .transaction::<_, _, sqlx::Error>(|tx| {
         Box::pin(async move {
-          let result = sqlx::query_as!(
-            FieldMappingRow,
-            "
-            SELECT
-              node_has_key.full_key, app_id, app_table_name, app_table_field
-            FROM node_has_key
-            INNER JOIN full_key_to_db_key
-              ON node_has_key.full_key = full_key_to_db_key.full_key
-            WHERE node_id = $1
-            ",
-            node_id
-          )
-          .fetch_all(&mut **tx)
-          .await
-          .into_diagnostic()?;
-
-          let field_mapping = result
-            .into_iter()
-            .map(|row| (row.full_key.clone(), row))
-            .collect::<HashMap<_, _>>();
-
-          // Group the keys by which relation they're in
-          let result_by_relation = field_mapping.iter().into_group_map_by(
-            |(
-              _,
-              FieldMappingRow {
-                app_id,
-                app_table_name,
-                ..
-              },
-            )| (app_id, app_table_name),
-          );
-
-          let mut all_relation_queries = vec![];
-          let mut all_relation_constraints = vec![];
-          let mut all_fields = vec![];
-          let mut field_counter = 0;
-          for (i, (relation, fields)) in result_by_relation.iter().enumerate() {
-            let constraint_name = format!("c{i}");
-            let mut keys = vec![];
-            let mut constraints = vec![];
-            for (key, field_info) in fields.iter() {
-              let counted_field_name = format!("f{field_counter}");
-              field_counter += 1;
-              keys.push(counted_field_name.clone());
-              constraints.push(format!(
-                "{}: {}",
-                field_info.relation_field.to_owned(),
-                counted_field_name,
-              ));
-              all_fields.push((
-                counted_field_name,
-                field_info.relation_field.to_owned(),
-                key,
-              ))
-            }
-            let keys = keys.join(", ");
-            let constraints = constraints.join(", ");
-            all_relation_queries.push(format!(
-              "
-              {constraint_name}[{keys}] :=
-                *{relation}{{ node_id, {constraints} }},
-                node_id = $node_id
-              "
-            ));
-            all_relation_constraints.push(format!("{constraint_name}[{keys}],"))
-          }
-
-          let all_relation_constraints = all_relation_constraints.join("\n");
-          let all_relation_queries = all_relation_queries.join("\n\n");
-          let all_field_names = all_fields
-            .iter()
-            .map(|(field_name, _, _)| field_name)
-            .join(", ");
-
-          let query = format!(
-            "
-            {all_relation_queries}
-
-            ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
-              *node {{ id, type, created_at, updated_at, extra_data }},
-              {all_relation_constraints}
-              id = $node_id
-            "
-          );
-
-          let result = tx.run_script(
-            &query,
-            btmap! { "node_id".to_owned() => node_id.to_string().into(), },
-          )?;
-
-          if result.rows.is_empty() {
-            bail!("Not found")
-          }
-
-          let created_at = DateTime::from_timestamp_millis(
-            (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
-          )
-          .unwrap();
-
-          let updated_at = DateTime::from_timestamp_millis(
-            (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
-          )
-          .unwrap();
-
-          let mut fields = HashMap::new();
-
-          for row in result
-            .rows
-            .into_iter()
-            .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
-          {
-            for (value, (_, _, field_name)) in row {
-              fields.insert(
-                field_name.to_string(),
-                data_value_to_json_value(&value),
-              );
-            }
-          }
-
-          Ok(NodeInfo {
-            node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
-            created_at,
-            updated_at,
-            fields: Some(fields),
-          })
-        })
-      })
-      .await?;
-
-    Ok(())
-  }
-}
-
-// #[derive(Debug)]
-// pub enum CreateOrUpdate {
-//   Create { r#type: String },
-//   Update { node_id: NodeId },
-// }
-
-// impl AppState {
-//   // TODO: Split this out into create and update
+          let node_id = node_id.clone();
+          let field_mapping =
+            AppState::get_related_field_list_for_node_id(&mut **tx, &node_id)
+              .await?;
+
+          // Group the keys by which relation they're in
+          let fields_by_table = field_mapping.iter().into_group_map_by(
+            |FieldMappingRow {
+               app_id,
+               app_table_name,
+               ..
+             }| (app_id, app_table_name),
+          );
+
+          // Run the query that grabs all of the relevant fields, and coalesce
+          // the fields back
+          let related_fields =
+            AppState::query_related_fields(&mut **tx, &fields_by_table).await?;
+
+          println!("Related fields: {:?}", related_fields);
+
+          // let created_at = DateTime::from_timestamp_millis(
+          //   (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
+          // )
+          // .unwrap();
+
+          // let updated_at = DateTime::from_timestamp_millis(
+          //   (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+          // )
+          // .unwrap();
+
+          // let mut fields = HashMap::new();
+
+          // for row in result
+          //   .rows
+          //   .into_iter()
+          //   .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
+          // {
+          //   for (value, (_, _, field_name)) in row {
+          //     fields.insert(
+          //       field_name.to_string(),
+          //       data_value_to_json_value(&value),
+          //     );
+          //   }
+          // }
+
+          todo!()
+
+          // Ok(NodeInfo {
+          //   node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
+          //   created_at,
+          //   updated_at,
+          //   fields: Some(fields),
+          // })
+        })
+      })
+      .await
+      .into_diagnostic()?;
+
+    todo!()
+    // Ok(())
+  }
+
+  async fn query_related_fields<'e, 'c: 'e, X>(
+    x: X,
+    fields_by_table: &HashMap<(&i64, &String), Vec<&FieldMappingRow>>,
+  ) -> sqlx::Result<HashMap<String, Value>>
+  where
+    X: 'e + Executor<'c, Database = Sqlite>,
+  {
+    let mut query = QueryBuilder::new("");
+    let mut mapping = HashMap::new();
+    let mut ctr = 0;
+
+    let mut selected_fields = vec![];
+
+    for ((app_id, app_table_name), fields) in fields_by_table.iter() {
+      let table_gen_name = format!("c{ctr}");
+      ctr += 1;
+
+      let mut keys = vec![];
+      for field_info in fields.iter() {
+        let field_gen_name = format!("f{ctr}");
+        ctr += 1;
+
+        mapping.insert(&field_info.full_key, field_gen_name.clone());
+        keys.push(field_gen_name.clone());
+        selected_fields.push(format!(
+          "{}.{} as {}",
+          table_gen_name, field_info.app_table_field, field_gen_name
+        ));
+
+        // constraints.push(format!(
+        //   "{}: {}",
+        //   field_info.relation_field.to_owned(),
+        //   field_gen_name,
+        // ));
+        // all_fields.push((
+        //   field_gen_name,
+        //   field_info.relation_field.to_owned(),
+        //   key,
+        // ))
+      }
+
+      // let keys = keys.join(", ");
+      // let constraints = constraints.join(", ");
+      // all_relation_queries.push(format!(
+      //   "
+      //   {table_gen_name}[{keys}] :=
+      //     *{relation}{{ node_id, {constraints} }},
+      //     node_id = $node_id
+      //   "
+      // ));
+      // all_relation_constraints.push(format!("{table_gen_name}[{keys}],"))
+    }
+
+    query.push("SELECT");
+    query.push(selected_fields.join(", "));
+    query.push("FROM");
+
+    println!("Query: {:?}", query.sql());
+
+    // let all_relation_constraints = all_relation_constraints.join("\n");
+    // let all_relation_queries = all_relation_queries.join("\n\n");
+    // let all_field_names = all_fields
+    //   .iter()
+    //   .map(|(field_name, _, _)| field_name)
+    //   .join(", ");
+
+    // let _query = format!(
+    //   "
+    //   {all_relation_queries}
+    //   ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
+    //     *node {{ id, type, created_at, updated_at, extra_data }},
+    //     {all_relation_constraints}
+    //     id = $node_id
+    //   "
+    // );
+
+    let rows = query.build().fetch_all(x).await.into_diagnostic();
+
+    todo!()
+  }
+}
+
+#[derive(Debug)]
+pub enum CreateOrUpdate {
+  Create { r#type: String },
+  Update { node_id: NodeId },
+}
+
+impl AppState {
+  // TODO: Split this out into create and update
   //   pub async fn create_or_update_node(
   //     &self,
   //     opts: CreateOrUpdate,
@@ -214,8 +204,6 @@ impl AppState {
   //     println!("Request: {opts:?} {extra_data:?}");
 
-  //     let tx = self.db.multi_transaction(true);
-
   //     let (created_at, updated_at) = match opts {
   //       CreateOrUpdate::Create { ref r#type } => {
   //         let node_result = tx.run_script(
@@ -385,6 +373,9 @@ impl AppState {
   //       fields: None,
   //     })
   //   }
+}
+
+// impl AppState {
   //   pub async fn update_node() {}


@@ -0,0 +1,42 @@
+use sqlx::{Executor, FromRow, Sqlite};
+
+use crate::AppState;
+
+#[derive(FromRow)]
+pub struct FieldMappingRow {
+  pub(crate) full_key: String,
+  pub(crate) app_id: i64,
+  pub(crate) app_table_name: String,
+  pub(crate) app_table_field: String,
+  pub(crate) db_table_name: Option<String>,
+}
+
+impl AppState {
+  pub async fn get_related_field_list_for_node_id<'e, 'c: 'e, X>(
+    x: X,
+    node_id: &str,
+  ) -> sqlx::Result<Vec<FieldMappingRow>>
+  where
+    X: 'e + Executor<'c, Database = Sqlite>,
+  {
+    sqlx::query_as!(
+      FieldMappingRow,
+      "
+      SELECT
+        node_has_key.full_key, key_mapping.app_id,
+        key_mapping.app_table_name, app_table_field,
+        app_table_mapping.db_table_name
+      FROM node_has_key
+      INNER JOIN key_mapping
+        ON node_has_key.full_key = key_mapping.full_key
+      INNER JOIN app_table_mapping
+        ON key_mapping.app_id = app_table_mapping.app_id
+        AND key_mapping.app_table_name = app_table_mapping.app_table_name
+      WHERE node_id = $1
+      ",
+      node_id
+    )
+    .fetch_all(x)
+    .await
+  }
+}
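
For a rough sense of how the new node_raw helper is meant to be called, here is a minimal, hypothetical sketch (not part of the commit; it assumes code living in the same crate with an already-open sqlx SQLite connection, and the node id string is a made-up placeholder):

use crate::{state::node_raw::FieldMappingRow, AppState};

// Hypothetical caller: resolve which app table and column back each key of a
// node, mirroring what get_node does before it builds the per-table SELECT.
async fn print_field_mapping(conn: &mut sqlx::SqliteConnection) -> sqlx::Result<()> {
  let rows: Vec<FieldMappingRow> =
    AppState::get_related_field_list_for_node_id(&mut *conn, "example-node-id")
      .await?;
  for row in &rows {
    // db_table_name comes from app_table_mapping and may be NULL, hence Option.
    println!(
      "{} -> app {} table {} ({:?}) column {}",
      row.full_key, row.app_id, row.app_table_name, row.db_table_name, row.app_table_field
    );
  }
  Ok(())
}

Inside get_node above, the same helper is invoked with the transaction executor (&mut **tx) rather than a plain connection.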