commit 21728e6de5 (parent 2d424d763f)
Michael Zhang, 2024-06-18 14:16:51 -05:00
11 changed files with 540 additions and 415 deletions

.env (new file)

@@ -0,0 +1 @@
+DATABASE_URL=sqlite:////Users/michael/Projects/panorama/test.db
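Note: sqlx's compile-time macros (see the query_as! call in node.rs below) read DATABASE_URL from .env at build time to check queries against a real schema, which is why a local test.db URL is committed here.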

.gitignore

@@ -2,4 +2,5 @@ node_modules
 dist
 target
 .DS_Store
 **/export/export.json
+test.db

Cargo.lock (generated)

@@ -3291,6 +3291,7 @@ dependencies = [
  "tantivy",
  "tokio",
  "uuid",
+ "walkdir",
 ]

 [[package]]

@@ -1,2 +1,2 @@
 workspace.resolver = "2"
-workspace.members = ["crates/*", "app/src-tauri"]
+workspace.members = ["crates/*", "ui/src-tauri"]

@@ -0,0 +1,3 @@
+fn main() {
+}

@@ -17,6 +17,7 @@ sugars = "3.0.1"
 tantivy = { version = "0.22.0", features = ["zstd"] }
 tokio = { version = "1.38.0", features = ["full"] }
 uuid = { version = "1.8.0", features = ["v7"] }
+walkdir = "2.5.0"

 [dependencies.async-imap]
 version = "0.9.7"

@@ -1,6 +1,40 @@
-CREATE TABLE "node" (
-  id TEXT PRIMARY KEY,
-  type TEXT,
-  updated_at DATETIME DEFAULT NOW(),
+CREATE TABLE node (
+  node_id TEXT PRIMARY KEY,
+  node_type TEXT NOT NULL,
+  updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
   extra_data JSON
 );
+
+CREATE TABLE node_has_key (
+  node_id TEXT NOT NULL,
+  full_key TEXT NOT NULL,
+  PRIMARY KEY (node_id, full_key)
+);
+CREATE INDEX node_has_key_idx_node_id ON node_has_key(node_id);
+CREATE INDEX node_has_key_idx_full_key ON node_has_key(full_key);
+
+-- App-related tables
+
+CREATE TABLE app (
+  app_id INTEGER PRIMARY KEY AUTOINCREMENT,
+  app_name TEXT NOT NULL,
+  app_version TEXT NOT NULL,
+  app_version_hash TEXT,
+  app_description TEXT,
+  app_homepage TEXT,
+  app_repository TEXT,
+  app_license TEXT
+);
+
+CREATE TABLE app_table (
+  app_id INTEGER NOT NULL,
+  app_table_name TEXT NOT NULL,
+  db_table_name TEXT NOT NULL
+);
+
+CREATE TABLE full_key_to_db_key (
+  full_key TEXT NOT NULL,
+  app_id INTEGER NOT NULL,
+  app_table_name TEXT NOT NULL,
+  app_table_field TEXT NOT NULL,
+  is_fts_enabled BOOLEAN NOT NULL DEFAULT FALSE
+);
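Note on the new schema: node_has_key records which fully-qualified keys a node carries, and full_key_to_db_key maps each key to the app table and field that actually store the value. A hedged sqlx sketch of the join this enables (variable names are illustrative; the committed version of this query lives in node.rs further down):

    // Resolve every key on one node to its backing app table/field.
    let rows = sqlx::query(
      "SELECT nhk.full_key, fk.app_table_name, fk.app_table_field
       FROM node_has_key nhk
       INNER JOIN full_key_to_db_key fk ON fk.full_key = nhk.full_key
       WHERE nhk.node_id = $1",
    )
    .bind(&node_id)
    .fetch_all(&mut *conn)
    .await
    .into_diagnostic()?;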

@@ -0,0 +1,46 @@
+use std::{
+  fs,
+  path::{Path, PathBuf},
+};
+
+use miette::{IntoDiagnostic, Result};
+
+use crate::AppState;
+
+impl AppState {
+  pub async fn install_apps_from_search_paths(&self) -> Result<()> {
+    let search_paths =
+      vec![PathBuf::from("/Users/michael/Projects/panorama/apps")];
+
+    let mut found = Vec::new();
+
+    for path in search_paths {
+      let read_dir = fs::read_dir(path).into_diagnostic()?;
+      for dir_entry in read_dir {
+        let dir_entry = dir_entry.into_diagnostic()?;
+        let path = dir_entry.path();
+        let manifest_path = path.join("manifest.yml");
+        if manifest_path.exists() {
+          found.push(path);
+        }
+      }
+    }
+
+    for path in found {
+      self.install_app_from_path(path).await?;
+    }
+
+    Ok(())
+  }
+
+  async fn install_app_from_path(&self, path: impl AsRef<Path>) -> Result<()> {
+    let app_path = path.as_ref();
+    let manifest_path = app_path.join("manifest.yml");
+
+    // Install tables
+
+    Ok(())
+  }
+}
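Note: install_app_from_path only resolves manifest.yml so far and installs nothing. A hypothetical manifest shape, assuming serde_yaml (these field names are guesses, not defined anywhere in this commit):

    // Hypothetical manifest schema; none of these fields come from the commit.
    #[derive(serde::Deserialize)]
    struct AppManifest {
      name: String,
      version: String,
      description: Option<String>,
    }

    // Possible wiring once parsing lands:
    // let manifest: AppManifest = serde_yaml::from_str(
    //   &fs::read_to_string(&manifest_path).into_diagnostic()?,
    // )
    // .into_diagnostic()?;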

@@ -1,8 +1,9 @@
+pub mod apps;
 // pub mod codetrack;
 // pub mod export;
 // pub mod journal;
 // pub mod mail;
-// pub mod node;
+pub mod node;
 // pub mod utils;

 use std::{collections::HashMap, fs, path::Path};
@@ -10,8 +11,9 @@ use std::{collections::HashMap, fs, path::Path};
 use bimap::BiMap;
 use miette::{Context, IntoDiagnostic, Result};
 use sqlx::{
+  pool::PoolConnection,
   sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
-  SqlitePool,
+  Sqlite, SqliteConnection, SqlitePool,
 };
 use tantivy::{
   directory::MmapDirectory,
@@ -69,7 +71,8 @@ impl AppState {
     let db_path = panorama_dir.join("db.sqlite");
     let sqlite_connect_options = SqliteConnectOptions::new()
       .filename(db_path)
-      .journal_mode(SqliteJournalMode::Wal);
+      .journal_mode(SqliteJournalMode::Wal)
+      .create_if_missing(true);
     let db = SqlitePoolOptions::new()
       .connect_with(sqlite_connect_options)
       .await
@@ -86,6 +89,10 @@ impl AppState {
     Ok(state)
   }

+  pub async fn conn(&self) -> Result<PoolConnection<Sqlite>> {
+    self.db.acquire().await.into_diagnostic()
+  }
+
   async fn init(&self) -> Result<()> {
     // run_migrations(&self.db).await?;
     MIGRATOR
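Note: conn() checks a single pooled connection out of the SqlitePool so callers can open transactions on it. A minimal usage sketch (the `state` binding and the query are illustrative, not from the commit):

    // Borrow one connection from the pool and run a simple query on it.
    let mut conn = state.conn().await?;
    let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM node")
      .fetch_one(&mut *conn)
      .await
      .into_diagnostic()?;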


@@ -7,17 +7,13 @@ use chrono::{DateTime, Utc};
 use itertools::Itertools;
 use miette::{bail, IntoDiagnostic, Result};
 use serde_json::Value;
-use tantivy::{
-  collector::TopDocs,
-  query::QueryParser,
-  schema::{OwnedValue, Value as _},
-  Document, TantivyDocument, Term,
-};
+use sqlx::{Acquire, Connection, FromRow};
+use tantivy::schema::{OwnedValue, Value as _};
 use uuid::Uuid;

 use crate::{AppState, NodeId};

-use super::utils::owned_value_to_json_value;
+// use super::utils::owned_value_to_json_value;

 pub type ExtraData = BTreeMap<String, Value>;
@@ -43,71 +39,98 @@ impl AppState {
   /// Get all properties of a node
   pub async fn get_node(&self, node_id: impl AsRef<str>) -> Result<NodeInfo> {
     let node_id = node_id.as_ref().to_owned();

-    let tx = self.db.multi_transaction(false);
-
-    let result = tx.run_script(
-      "
-      ?[key, relation, field_name, type, is_fts_enabled] :=
-        *node_has_key { key, id },
-        *fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled },
-        id = $node_id
-      ",
-      btmap! {"node_id".to_owned() => node_id.to_string().into()},
-    )?;
-
-    let field_mapping = AppState::rows_to_field_mapping(result)?;
-
-    // Group the keys by which relation they're in
-    let result_by_relation = field_mapping
-      .iter()
-      .into_group_map_by(|(_, FieldInfo { relation_name, .. })| relation_name);
-
-    let mut all_relation_queries = vec![];
-    let mut all_relation_constraints = vec![];
-    let mut all_fields = vec![];
-    let mut field_counter = 0;
-    for (i, (relation, fields)) in result_by_relation.iter().enumerate() {
-      let constraint_name = format!("c{i}");
-
-      let mut keys = vec![];
-      let mut constraints = vec![];
-      for (key, field_info) in fields.iter() {
-        let counted_field_name = format!("f{field_counter}");
-        field_counter += 1;
-
-        keys.push(counted_field_name.clone());
-        constraints.push(format!(
-          "{}: {}",
-          field_info.relation_field.to_owned(),
-          counted_field_name,
-        ));
-        all_fields.push((
-          counted_field_name,
-          field_info.relation_field.to_owned(),
-          key,
-        ))
-      }
-
-      let keys = keys.join(", ");
-      let constraints = constraints.join(", ");
-      all_relation_queries.push(format!(
-        "
-        {constraint_name}[{keys}] :=
-          *{relation}{{ node_id, {constraints} }},
-          node_id = $node_id
-        "
-      ));
-      all_relation_constraints.push(format!("{constraint_name}[{keys}],"))
-    }
-
-    let all_relation_constraints = all_relation_constraints.join("\n");
-    let all_relation_queries = all_relation_queries.join("\n\n");
-    let all_field_names = all_fields
-      .iter()
-      .map(|(field_name, _, _)| field_name)
-      .join(", ");
-
-    let query = format!(
-      "
-      {all_relation_queries}
-      ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
+    let conn = self.conn().await?;
+
+    #[derive(FromRow)]
+    struct FieldMappingRow {
+      full_key: String,
+      app_id: i64,
+      app_table_name: String,
+      app_table_field: String,
+    }
+
+    conn
+      .transaction(|tx| {
+        Box::pin(async move {
+          let result = sqlx::query_as!(
+            FieldMappingRow,
+            "
+            SELECT
+              node_has_key.full_key, app_id, app_table_name, app_table_field
+            FROM node_has_key
+            INNER JOIN full_key_to_db_key
+              ON node_has_key.full_key = full_key_to_db_key.full_key
+            WHERE node_id = $1
+            ",
+            node_id
+          )
+          .fetch_all(&mut **tx)
+          .await
+          .into_diagnostic()?;
+
+          let field_mapping = result
+            .into_iter()
+            .map(|row| (row.full_key.clone(), row))
+            .collect::<HashMap<_, _>>();
+
+          // Group the keys by which relation they're in
+          let result_by_relation = field_mapping.iter().into_group_map_by(
+            |(
+              _,
+              FieldMappingRow {
+                app_id,
+                app_table_name,
+                ..
+              },
+            )| (app_id, app_table_name),
+          );
+
+          let mut all_relation_queries = vec![];
+          let mut all_relation_constraints = vec![];
+          let mut all_fields = vec![];
+          let mut field_counter = 0;
+          for (i, (relation, fields)) in result_by_relation.iter().enumerate()
+          {
+            let constraint_name = format!("c{i}");
+
+            let mut keys = vec![];
+            let mut constraints = vec![];
+            for (key, field_info) in fields.iter() {
+              let counted_field_name = format!("f{field_counter}");
+              field_counter += 1;
+
+              keys.push(counted_field_name.clone());
+              constraints.push(format!(
+                "{}: {}",
+                field_info.relation_field.to_owned(),
+                counted_field_name,
+              ));
+              all_fields.push((
+                counted_field_name,
+                field_info.relation_field.to_owned(),
+                key,
+              ))
+            }
+
+            let keys = keys.join(", ");
+            let constraints = constraints.join(", ");
+            all_relation_queries.push(format!(
+              "
+              {constraint_name}[{keys}] :=
+                *{relation}{{ node_id, {constraints} }},
+                node_id = $node_id
+              "
+            ));
+            all_relation_constraints.push(format!("{constraint_name}[{keys}],"))
+          }
+
+          let all_relation_constraints = all_relation_constraints.join("\n");
+          let all_relation_queries = all_relation_queries.join("\n\n");
+          let all_field_names = all_fields
+            .iter()
+            .map(|(field_name, _, _)| field_name)
+            .join(", ");
+
+          let query = format!(
+            "
+            {all_relation_queries}
+            ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
@@ -115,361 +138,369 @@ impl AppState {
-        {all_relation_constraints}
-        id = $node_id
-      "
-    );
-
-    let result = tx.run_script(
-      &query,
-      btmap! { "node_id".to_owned() => node_id.to_string().into(), },
-    )?;
-
-    if result.rows.is_empty() {
-      bail!("Not found")
-    }
-
-    let created_at = DateTime::from_timestamp_millis(
-      (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
-    )
-    .unwrap();
-    let updated_at = DateTime::from_timestamp_millis(
-      (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
-    )
-    .unwrap();
-
-    let mut fields = HashMap::new();
-    for row in result
-      .rows
-      .into_iter()
-      .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
-    {
-      for (value, (_, _, field_name)) in row {
-        fields.insert(field_name.to_string(), data_value_to_json_value(&value));
-      }
-    }
-
-    Ok(NodeInfo {
-      node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
-      created_at,
-      updated_at,
-      fields: Some(fields),
-    })
-  }
-}
-
-#[derive(Debug)]
-pub enum CreateOrUpdate {
-  Create { r#type: String },
-  Update { node_id: NodeId },
-}
-
-impl AppState {
-  // TODO: Split this out into create and update
-  pub async fn create_or_update_node(
-    &self,
-    opts: CreateOrUpdate,
-    extra_data: Option<ExtraData>,
-  ) -> Result<NodeInfo> {
-    let node_id = match opts {
-      CreateOrUpdate::Create { .. } => NodeId(Uuid::now_v7()),
-      CreateOrUpdate::Update { ref node_id } => node_id.clone(),
-    };
-    let node_id = node_id.to_string();
-
-    let action = match opts {
-      CreateOrUpdate::Create { .. } => "put",
-      CreateOrUpdate::Update { .. } => "update",
-    };
-
-    println!("Request: {opts:?} {extra_data:?}");
-
-    let tx = self.db.multi_transaction(true);
-
-    let (created_at, updated_at) = match opts {
-      CreateOrUpdate::Create { ref r#type } => {
-        let node_result = tx.run_script(
-          "
-          ?[id, type] <- [[$node_id, $type]]
-          :put node { id, type }
-          :returning
-          ",
-          btmap! {
-            "node_id".to_owned() => DataValue::from(node_id.clone()),
-            "type".to_owned() => DataValue::from(r#type.to_owned()),
-          },
-        )?;
-        let created_at = DateTime::from_timestamp_millis(
-          (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
-        )
-        .unwrap();
-        let updated_at = DateTime::from_timestamp_millis(
-          (node_result.rows[0][4].get_float().unwrap() * 1000.0) as i64,
-        )
-        .unwrap();
-        (created_at, updated_at)
-      }
-      CreateOrUpdate::Update { .. } => {
-        let node_result = tx.run_script(
-          "
-          ?[id, type, created_at, updated_at] := *node { id, type, created_at, updated_at },
-          id = $node_id
-          ",
-          btmap! {
-            "node_id".to_owned() => DataValue::from(node_id.clone()),
-          },
-        )?;
-        let created_at = DateTime::from_timestamp_millis(
-          (node_result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
-        )
-        .unwrap();
-        let updated_at = DateTime::from_timestamp_millis(
-          (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
-        )
-        .unwrap();
-        (created_at, updated_at)
-      }
-    };
-
-    if let Some(extra_data) = extra_data {
-      let node_id_field = self
-        .tantivy_field_map
-        .get_by_left("node_id")
-        .unwrap()
-        .clone();
-
-      if !extra_data.is_empty() {
-        let keys = extra_data.keys().map(|s| s.to_owned()).collect::<Vec<_>>();
-        let field_mapping =
-          self.get_rows_for_extra_keys(&tx, keys.as_slice())?;
-
-        // Group the keys by which relation they're in
-        let result_by_relation = field_mapping.iter().into_group_map_by(
-          |(_, FieldInfo { relation_name, .. })| relation_name,
-        );
-
-        for (relation, fields) in result_by_relation.iter() {
-          let mut doc = btmap! { node_id_field.clone() => OwnedValue::Str(node_id.to_owned()) };
-          let fields_mapping = fields
-            .into_iter()
-            .map(
-              |(
-                key,
-                FieldInfo {
-                  relation_field,
-                  r#type,
-                  is_fts_enabled,
-                  ..
-                },
-              )| {
-                let new_value = extra_data.get(*key).unwrap();
-                // TODO: Make this more generic
-                let new_value = match r#type.as_str() {
-                  "int" => DataValue::from(new_value.as_i64().unwrap()),
-                  _ => DataValue::from(new_value.as_str().unwrap()),
-                };
-
-                if *is_fts_enabled {
-                  if let Some(field) = self.tantivy_field_map.get_by_left(*key)
-                  {
-                    doc.insert(
-                      field.clone(),
-                      OwnedValue::Str(new_value.get_str().unwrap().to_owned()),
-                    );
-                  }
-                }
-
-                (relation_field.to_owned(), new_value)
-              },
-            )
-            .collect::<BTreeMap<_, _>>();
-
-          let mut writer =
-            self.tantivy_index.writer(15_000_000).into_diagnostic()?;
-          let delete_term =
-            Term::from_field_text(node_id_field.clone(), &node_id);
-          writer.delete_term(delete_term);
-          writer.add_document(doc).into_diagnostic()?;
-          writer.commit().into_diagnostic()?;
-          drop(writer);
-
-          let keys = fields_mapping.keys().collect::<Vec<_>>();
-          let keys_joined = keys.iter().join(", ");
-
-          if !keys.is_empty() {
-            let query = format!(
-              "
-              ?[ node_id, {keys_joined} ] <- [$input_data]
-              :{action} {relation} {{ node_id, {keys_joined} }}
-              "
-            );
-
-            let mut params = vec![];
-            params.push(DataValue::from(node_id.clone()));
-            for key in keys {
-              params.push(fields_mapping[key].clone());
-            }
-
-            let result = tx.run_script(
-              &query,
-              btmap! {
-                "input_data".to_owned() => DataValue::List(params),
-              },
-            );
-          }
-        }
-
-        let input = DataValue::List(
-          keys
-            .iter()
-            .map(|s| {
-              DataValue::List(vec![
-                DataValue::from(s.to_owned()),
-                DataValue::from(node_id.clone()),
-              ])
-            })
-            .collect_vec(),
-        );
-
-        tx.run_script(
-          "
-          ?[key, id] <- $input_data
-          :put node_has_key { key, id }
-          ",
-          btmap! {
-            "input_data".to_owned() => input
-          },
-        )?;
-      }
-    }
-
-    tx.commit()?;
-
-    Ok(NodeInfo {
-      node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
-      created_at,
-      updated_at,
-      fields: None,
-    })
-  }
-
-  pub async fn update_node() {}
-
-  pub async fn search_nodes(
-    &self,
-    query: impl AsRef<str>,
-  ) -> Result<Vec<(NodeId, Value)>> {
-    let query = query.as_ref();
-    let reader = self.tantivy_index.reader().into_diagnostic()?;
-    let searcher = reader.searcher();
-
-    let node_id_field = self
-      .tantivy_field_map
-      .get_by_left("node_id")
-      .unwrap()
-      .clone();
-    let journal_page_field = self
-      .tantivy_field_map
-      .get_by_left("panorama/journal/page/content")
-      .unwrap()
-      .clone();
-
-    let mut query_parser =
-      QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
-    query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
-    let query = query_parser.parse_query(query).into_diagnostic()?;
-
-    let top_docs = searcher
-      .search(&query, &TopDocs::with_limit(10))
-      .into_diagnostic()?;
-
-    Ok(
-      top_docs
-        .into_iter()
-        .map(|(score, doc_address)| {
-          let retrieved_doc =
-            searcher.doc::<TantivyDocument>(doc_address).unwrap();
-          let node_id = retrieved_doc
-            .get_first(node_id_field.clone())
-            .unwrap()
-            .as_str()
-            .unwrap();
-          let all_fields = retrieved_doc.get_sorted_field_values();
-          let node_id = NodeId(Uuid::from_str(node_id).unwrap());
-          let fields = all_fields
-            .into_iter()
-            .map(|(field, values)| {
-              (
-                self.tantivy_field_map.get_by_right(&field).unwrap(),
-                if values.len() == 1 {
-                  owned_value_to_json_value(values[0])
-                } else {
-                  Value::Array(
-                    values
-                      .into_iter()
-                      .map(owned_value_to_json_value)
-                      .collect_vec(),
-                  )
-                },
-              )
-            })
-            .collect::<HashMap<_, _>>();
-          (
-            node_id,
-            json!({
-              "score": score,
-              "fields": fields,
-            }),
-          )
-        })
-        .collect::<Vec<_>>(),
-    )
-  }
-
-  fn get_rows_for_extra_keys(
-    &self,
-    tx: &MultiTransaction,
-    keys: &[String],
-  ) -> Result<FieldMapping> {
-    let result = tx.run_script(
-      "
-      ?[key, relation, field_name, type, is_fts_enabled] :=
-        *fqkey_to_dbkey{key, relation, field_name, type, is_fts_enabled},
-        is_in(key, $keys)
-      ",
-      btmap! {
-        "keys".to_owned() => DataValue::List(
-          keys.into_iter()
-            .map(|s| DataValue::from(s.as_str()))
-            .collect::<Vec<_>>()
-        ),
-      },
-    )?;
-
-    AppState::rows_to_field_mapping(result)
-  }
-
-  fn rows_to_field_mapping(result: NamedRows) -> Result<FieldMapping> {
-    let s = |s: &DataValue| s.get_str().unwrap().to_owned();
-    Ok(
-      result
-        .rows
-        .into_iter()
-        .map(|row| {
-          (
-            s(&row[0]),
-            FieldInfo {
-              relation_name: s(&row[1]),
-              relation_field: s(&row[2]),
-              r#type: s(&row[3]),
-              is_fts_enabled: row[4].get_bool().unwrap(),
-            },
-          )
-        })
-        .collect::<HashMap<_, _>>(),
-    )
-  }
-}
+              {all_relation_constraints}
+              id = $node_id
+            "
+          );
+
+          let result = tx.run_script(
+            &query,
+            btmap! { "node_id".to_owned() => node_id.to_string().into(), },
+          )?;
+
+          if result.rows.is_empty() {
+            bail!("Not found")
+          }
+
+          let created_at = DateTime::from_timestamp_millis(
+            (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
+          )
+          .unwrap();
+
+          let updated_at = DateTime::from_timestamp_millis(
+            (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+          )
+          .unwrap();
+
+          let mut fields = HashMap::new();
+          for row in result
+            .rows
+            .into_iter()
+            .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
+          {
+            for (value, (_, _, field_name)) in row {
+              fields.insert(
+                field_name.to_string(),
+                data_value_to_json_value(&value),
+              );
+            }
+          }
+
+          Ok(NodeInfo {
+            node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
+            created_at,
+            updated_at,
+            fields: Some(fields),
+          })
+        })
+      })
+      .await?;
+
+    Ok(())
+  }
+}
+
+// #[derive(Debug)]
+// pub enum CreateOrUpdate {
+//   Create { r#type: String },
+//   Update { node_id: NodeId },
+// }
+
+// impl AppState {
+//   // TODO: Split this out into create and update
+//   pub async fn create_or_update_node(
+//     &self,
+//     opts: CreateOrUpdate,
+//     extra_data: Option<ExtraData>,
+//   ) -> Result<NodeInfo> {
+//     let node_id = match opts {
+//       CreateOrUpdate::Create { .. } => NodeId(Uuid::now_v7()),
+//       CreateOrUpdate::Update { ref node_id } => node_id.clone(),
+//     };
+//     let node_id = node_id.to_string();
+
+//     let action = match opts {
+//       CreateOrUpdate::Create { .. } => "put",
+//       CreateOrUpdate::Update { .. } => "update",
+//     };
+
+//     println!("Request: {opts:?} {extra_data:?}");
+
+//     let tx = self.db.multi_transaction(true);
+
+//     let (created_at, updated_at) = match opts {
+//       CreateOrUpdate::Create { ref r#type } => {
+//         let node_result = tx.run_script(
+//           "
+//           ?[id, type] <- [[$node_id, $type]]
+//           :put node { id, type }
+//           :returning
+//           ",
+//           btmap! {
+//             "node_id".to_owned() => DataValue::from(node_id.clone()),
+//             "type".to_owned() => DataValue::from(r#type.to_owned()),
+//           },
+//         )?;
+//         let created_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         let updated_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][4].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         (created_at, updated_at)
+//       }
+//       CreateOrUpdate::Update { .. } => {
+//         let node_result = tx.run_script(
+//           "
+//           ?[id, type, created_at, updated_at] := *node { id, type, created_at, updated_at },
+//           id = $node_id
+//           ",
+//           btmap! {
+//             "node_id".to_owned() => DataValue::from(node_id.clone()),
+//           },
+//         )?;
+//         let created_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         let updated_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         (created_at, updated_at)
+//       }
+//     };
+
+//     if let Some(extra_data) = extra_data {
+//       let node_id_field = self
+//         .tantivy_field_map
+//         .get_by_left("node_id")
+//         .unwrap()
+//         .clone();
+
+//       if !extra_data.is_empty() {
+//         let keys = extra_data.keys().map(|s| s.to_owned()).collect::<Vec<_>>();
+//         let field_mapping =
+//           self.get_rows_for_extra_keys(&tx, keys.as_slice())?;
+
+//         // Group the keys by which relation they're in
+//         let result_by_relation = field_mapping.iter().into_group_map_by(
+//           |(_, FieldInfo { relation_name, .. })| relation_name,
+//         );
+
+//         for (relation, fields) in result_by_relation.iter() {
+//           let mut doc = btmap! { node_id_field.clone() => OwnedValue::Str(node_id.to_owned()) };
+//           let fields_mapping = fields
+//             .into_iter()
+//             .map(
+//               |(
+//                 key,
+//                 FieldInfo {
+//                   relation_field,
+//                   r#type,
+//                   is_fts_enabled,
+//                   ..
+//                 },
+//               )| {
+//                 let new_value = extra_data.get(*key).unwrap();
+//                 // TODO: Make this more generic
+//                 let new_value = match r#type.as_str() {
+//                   "int" => DataValue::from(new_value.as_i64().unwrap()),
+//                   _ => DataValue::from(new_value.as_str().unwrap()),
+//                 };
+
+//                 if *is_fts_enabled {
+//                   if let Some(field) = self.tantivy_field_map.get_by_left(*key)
+//                   {
+//                     doc.insert(
+//                       field.clone(),
+//                       OwnedValue::Str(new_value.get_str().unwrap().to_owned()),
+//                     );
+//                   }
+//                 }
+
+//                 (relation_field.to_owned(), new_value)
+//               },
+//             )
+//             .collect::<BTreeMap<_, _>>();
+
+//           let mut writer =
+//             self.tantivy_index.writer(15_000_000).into_diagnostic()?;
+//           let delete_term =
+//             Term::from_field_text(node_id_field.clone(), &node_id);
+//           writer.delete_term(delete_term);
+//           writer.add_document(doc).into_diagnostic()?;
+//           writer.commit().into_diagnostic()?;
+//           drop(writer);
+
+//           let keys = fields_mapping.keys().collect::<Vec<_>>();
+//           let keys_joined = keys.iter().join(", ");
+
+//           if !keys.is_empty() {
+//             let query = format!(
+//               "
+//               ?[ node_id, {keys_joined} ] <- [$input_data]
+//               :{action} {relation} {{ node_id, {keys_joined} }}
+//               "
+//             );
+
+//             let mut params = vec![];
+//             params.push(DataValue::from(node_id.clone()));
+//             for key in keys {
+//               params.push(fields_mapping[key].clone());
+//             }
+
+//             let result = tx.run_script(
+//               &query,
+//               btmap! {
+//                 "input_data".to_owned() => DataValue::List(params),
+//               },
+//             );
+//           }
+//         }
+
+//         let input = DataValue::List(
+//           keys
+//             .iter()
+//             .map(|s| {
+//               DataValue::List(vec![
+//                 DataValue::from(s.to_owned()),
+//                 DataValue::from(node_id.clone()),
+//               ])
+//             })
+//             .collect_vec(),
+//         );
+
+//         tx.run_script(
+//           "
+//           ?[key, id] <- $input_data
+//           :put node_has_key { key, id }
+//           ",
+//           btmap! {
+//             "input_data".to_owned() => input
+//           },
+//         )?;
+//       }
+//     }
+
+//     tx.commit()?;
+
+//     Ok(NodeInfo {
+//       node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
+//       created_at,
+//       updated_at,
+//       fields: None,
+//     })
+//   }
+
+//   pub async fn update_node() {}
+
+//   pub async fn search_nodes(
+//     &self,
+//     query: impl AsRef<str>,
+//   ) -> Result<Vec<(NodeId, Value)>> {
+//     let query = query.as_ref();
+//     let reader = self.tantivy_index.reader().into_diagnostic()?;
+//     let searcher = reader.searcher();
+
+//     let node_id_field = self
+//       .tantivy_field_map
+//       .get_by_left("node_id")
+//       .unwrap()
+//       .clone();
+//     let journal_page_field = self
+//       .tantivy_field_map
+//       .get_by_left("panorama/journal/page/content")
+//       .unwrap()
+//       .clone();
+
+//     let mut query_parser =
+//       QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
+//     query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
+//     let query = query_parser.parse_query(query).into_diagnostic()?;
+
+//     let top_docs = searcher
+//       .search(&query, &TopDocs::with_limit(10))
+//       .into_diagnostic()?;
+
+//     Ok(
+//       top_docs
+//         .into_iter()
+//         .map(|(score, doc_address)| {
+//           let retrieved_doc =
+//             searcher.doc::<TantivyDocument>(doc_address).unwrap();
+//           let node_id = retrieved_doc
+//             .get_first(node_id_field.clone())
+//             .unwrap()
+//             .as_str()
+//             .unwrap();
+//           let all_fields = retrieved_doc.get_sorted_field_values();
+//           let node_id = NodeId(Uuid::from_str(node_id).unwrap());
+//           let fields = all_fields
+//             .into_iter()
+//             .map(|(field, values)| {
+//               (
+//                 self.tantivy_field_map.get_by_right(&field).unwrap(),
+//                 if values.len() == 1 {
+//                   owned_value_to_json_value(values[0])
+//                 } else {
+//                   Value::Array(
+//                     values
+//                       .into_iter()
+//                       .map(owned_value_to_json_value)
+//                       .collect_vec(),
+//                   )
+//                 },
+//               )
+//             })
+//             .collect::<HashMap<_, _>>();
+//           (
+//             node_id,
+//             json!({
+//               "score": score,
+//               "fields": fields,
+//             }),
+//           )
+//         })
+//         .collect::<Vec<_>>(),
+//     )
+//   }
+
+//   fn get_rows_for_extra_keys(
+//     &self,
+//     tx: &MultiTransaction,
+//     keys: &[String],
+//   ) -> Result<FieldMapping> {
+//     let result = tx.run_script(
+//       "
+//       ?[key, relation, field_name, type, is_fts_enabled] :=
+//         *fqkey_to_dbkey{key, relation, field_name, type, is_fts_enabled},
+//         is_in(key, $keys)
+//       ",
+//       btmap! {
+//         "keys".to_owned() => DataValue::List(
+//           keys.into_iter()
+//             .map(|s| DataValue::from(s.as_str()))
+//             .collect::<Vec<_>>()
+//         ),
+//       },
+//     )?;
+
+//     AppState::rows_to_field_mapping(result)
+//   }
+
+//   fn rows_to_field_mapping(result: NamedRows) -> Result<FieldMapping> {
+//     let s = |s: &DataValue| s.get_str().unwrap().to_owned();
+//     Ok(
+//       result
+//         .rows
+//         .into_iter()
+//         .map(|row| {
+//           (
+//             s(&row[0]),
+//             FieldInfo {
+//               relation_name: s(&row[1]),
+//               relation_field: s(&row[2]),
+//               r#type: s(&row[3]),
+//               is_fts_enabled: row[4].get_bool().unwrap(),
+//             },
+//           )
+//         })
+//         .collect::<HashMap<_, _>>(),
+//     )
+//   }
+// }
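Note: an illustrative call site for the reworked get_node (the UUID is a placeholder). As committed, the method still ends with Ok(()) after the transaction, so the NodeInfo assembled inside the closure is not yet plumbed back to the caller; once it is, usage would look like:

    let info = state
      .get_node("0190163c-0000-7000-8000-000000000000")
      .await?;
    println!("updated at {}", info.updated_at);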