Michael Zhang 2024-06-18 14:16:51 -05:00
parent 2d424d763f
commit 21728e6de5
11 changed files with 540 additions and 415 deletions

.env (new file)

@@ -0,0 +1 @@
+DATABASE_URL=sqlite:////Users/michael/Projects/panorama/test.db
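The checked-in .env looks like it exists for sqlx: the query_as! macro used in the node code below checks its SQL against the database named by DATABASE_URL at compile time, while the runtime pool in lib.rs is still built from SqliteConnectOptions. A minimal sketch of reading the same variable at runtime, purely as an illustration (the tokio runtime and sqlx's sqlite feature are assumed; the variable would need to be exported or loaded with something like dotenvy):

use sqlx::sqlite::SqlitePoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
  // Same URL as in .env; the real application builds its pool from
  // SqliteConnectOptions in lib.rs, so this standalone connect is only a demo.
  let url = std::env::var("DATABASE_URL").expect("DATABASE_URL is not set");
  let pool = SqlitePoolOptions::new().connect(&url).await?;
  sqlx::query("SELECT 1").execute(&pool).await?;
  Ok(())
}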

.gitignore (vendored)

@@ -3,3 +3,4 @@ dist
 target
 .DS_Store
 **/export/export.json
+test.db

Cargo.lock (generated)

@@ -3291,6 +3291,7 @@ dependencies = [
  "tantivy",
  "tokio",
  "uuid",
+ "walkdir",
 ]

 [[package]]

(file name not captured in this view)

@@ -1,2 +1,2 @@
 workspace.resolver = "2"
-workspace.members = ["crates/*", "app/src-tauri"]
+workspace.members = ["crates/*", "ui/src-tauri"]

(file name not captured in this view — new file)

@@ -0,0 +1,3 @@
+fn main() {
+}

(file name not captured in this view)

@@ -17,6 +17,7 @@ sugars = "3.0.1"
 tantivy = { version = "0.22.0", features = ["zstd"] }
 tokio = { version = "1.38.0", features = ["full"] }
 uuid = { version = "1.8.0", features = ["v7"] }
+walkdir = "2.5.0"

 [dependencies.async-imap]
 version = "0.9.7"
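walkdir is added as a dependency here, but the new app-installation code below still scans with std::fs::read_dir; presumably the crate is intended for recursive manifest discovery later. A small sketch of what that could look like — the search root and the manifest.yml file name are carried over from that code, everything else is an assumption:

use std::path::PathBuf;

use walkdir::WalkDir;

// Hypothetical recursive variant of the manifest scan below: collect every
// directory that directly contains a manifest.yml, at any depth.
fn find_app_dirs(root: &str) -> Vec<PathBuf> {
  WalkDir::new(root)
    .into_iter()
    .filter_map(Result::ok)
    .filter(|entry| entry.file_name() == "manifest.yml")
    .filter_map(|entry| entry.path().parent().map(|p| p.to_path_buf()))
    .collect()
}

fn main() {
  for dir in find_app_dirs("/Users/michael/Projects/panorama/apps") {
    println!("found app at {}", dir.display());
  }
}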

(migration SQL — file name not captured in this view)

@@ -1,6 +1,40 @@
-CREATE TABLE "node" (
-  id TEXT PRIMARY KEY,
-  type TEXT,
-  updated_at DATETIME DEFAULT NOW(),
+CREATE TABLE node (
+  node_id TEXT PRIMARY KEY,
+  node_type TEXT NOT NULL,
+  updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
   extra_data JSON
 );
+
+CREATE TABLE node_has_key (
+  node_id TEXT NOT NULL,
+  full_key TEXT NOT NULL,
+  PRIMARY KEY (node_id, full_key)
+);
+CREATE INDEX node_has_key_idx_node_id ON node_has_key(node_id);
+CREATE INDEX node_has_key_idx_full_key ON node_has_key(full_key);
+
+-- App-related tables
+
+CREATE TABLE app (
+  app_id INTEGER PRIMARY KEY AUTOINCREMENT,
+  app_name TEXT NOT NULL,
+  app_version TEXT NOT NULL,
+  app_version_hash TEXT,
+  app_description TEXT,
+  app_homepage TEXT,
+  app_repository TEXT,
+  app_license TEXT
+);
+
+CREATE TABLE app_table (
+  app_id INTEGER NOT NULL,
+  app_table_name TEXT NOT NULL,
+  db_table_name TEXT NOT NULL
+);
+
+CREATE TABLE full_key_to_db_key (
+  full_key TEXT NOT NULL,
+  app_id INTEGER NOT NULL,
+  app_table_name TEXT NOT NULL,
+  app_table_field TEXT NOT NULL,
+  is_fts_enabled BOOLEAN NOT NULL DEFAULT FALSE
+);
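node_has_key and full_key_to_db_key are the tables the rewritten get_node below joins to find which app table and column hold the value for each fully qualified key. A standalone sketch of that lookup as a runtime sqlx query (the FieldMappingRow name mirrors the struct defined inside get_node; the pool pointing at this schema is assumed):

use sqlx::{FromRow, SqlitePool};

#[derive(Debug, FromRow)]
struct FieldMappingRow {
  full_key: String,
  app_id: i64,
  app_table_name: String,
  app_table_field: String,
}

// For a given node, resolve each of its keys to the app table/field that
// stores the value, using the two mapping tables from the migration above.
async fn field_mappings_for_node(
  pool: &SqlitePool,
  node_id: &str,
) -> Result<Vec<FieldMappingRow>, sqlx::Error> {
  sqlx::query_as::<_, FieldMappingRow>(
    "SELECT node_has_key.full_key, app_id, app_table_name, app_table_field
     FROM node_has_key
     INNER JOIN full_key_to_db_key
       ON node_has_key.full_key = full_key_to_db_key.full_key
     WHERE node_id = ?",
  )
  .bind(node_id)
  .fetch_all(pool)
  .await
}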

(file name not captured in this view — new file)

@@ -0,0 +1,46 @@
+use std::{
+  fs,
+  path::{Path, PathBuf},
+};
+
+use miette::{IntoDiagnostic, Result};
+
+use crate::AppState;
+
+impl AppState {
+  pub async fn install_apps_from_search_paths(&self) -> Result<()> {
+    let search_paths =
+      vec![PathBuf::from("/Users/michael/Projects/panorama/apps")];
+
+    let mut found = Vec::new();
+
+    for path in search_paths {
+      let read_dir = fs::read_dir(path).into_diagnostic()?;
+      for dir_entry in read_dir {
+        let dir_entry = dir_entry.into_diagnostic()?;
+        let path = dir_entry.path();
+        let manifest_path = path.join("manifest.yml");
+        if manifest_path.exists() {
+          found.push(path);
+        }
+      }
+    }
+
+    for path in found {
+      self.install_app_from_path(path).await?;
+    }
+
+    Ok(())
+  }
+
+  async fn install_app_from_path(&self, path: impl AsRef<Path>) -> Result<()> {
+    let app_path = path.as_ref();
+    let manifest_path = app_path.join("manifest.yml");
+
+    // Install tables
+
+    Ok(())
+  }
+}
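install_app_from_path is still a stub (it only computes the manifest path), and nothing in this diff calls install_apps_from_search_paths yet. One plausible wiring point, shown purely as an assumption, is right after AppState is constructed and initialized:

use crate::AppState;

// Hypothetical call site, not part of this commit: run app discovery once
// the state (database pool, migrations, Tantivy index) is ready.
async fn start(state: &AppState) -> miette::Result<()> {
  state.install_apps_from_search_paths().await?;
  Ok(())
}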

(file name not captured in this view)

@@ -1,8 +1,9 @@
+pub mod apps;
 // pub mod codetrack;
 // pub mod export;
 // pub mod journal;
 // pub mod mail;
-// pub mod node;
+pub mod node;
 // pub mod utils;

 use std::{collections::HashMap, fs, path::Path};
@@ -10,8 +11,9 @@ use std::{collections::HashMap, fs, path::Path};
 use bimap::BiMap;
 use miette::{Context, IntoDiagnostic, Result};
 use sqlx::{
+  pool::PoolConnection,
   sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
-  SqlitePool,
+  Sqlite, SqliteConnection, SqlitePool,
 };
 use tantivy::{
   directory::MmapDirectory,
@@ -69,7 +71,8 @@ impl AppState {
     let db_path = panorama_dir.join("db.sqlite");
     let sqlite_connect_options = SqliteConnectOptions::new()
       .filename(db_path)
-      .journal_mode(SqliteJournalMode::Wal);
+      .journal_mode(SqliteJournalMode::Wal)
+      .create_if_missing(true);
     let db = SqlitePoolOptions::new()
       .connect_with(sqlite_connect_options)
       .await
@@ -86,6 +89,10 @@ impl AppState {
     Ok(state)
   }

+  pub async fn conn(&self) -> Result<PoolConnection<Sqlite>> {
+    self.db.acquire().await.into_diagnostic()
+  }
+
   async fn init(&self) -> Result<()> {
     // run_migrations(&self.db).await?;
     MIGRATOR
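The new conn() helper checks a connection out of the pool so that callers (like the rewritten get_node below) can open their own transactions against it. A minimal usage sketch under the same AppState, with the query itself assumed:

use miette::{IntoDiagnostic, Result};

use crate::AppState;

// Hypothetical caller, not part of this commit: borrow a pooled connection
// and run a one-off statement on it.
async fn count_nodes(state: &AppState) -> Result<i64> {
  let mut conn = state.conn().await?;
  let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM node")
    .fetch_one(&mut *conn)
    .await
    .into_diagnostic()?;
  Ok(count)
}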

(file name not captured in this view)

@@ -7,17 +7,13 @@ use chrono::{DateTime, Utc};
 use itertools::Itertools;
 use miette::{bail, IntoDiagnostic, Result};
 use serde_json::Value;
-use tantivy::{
-  collector::TopDocs,
-  query::QueryParser,
-  schema::{OwnedValue, Value as _},
-  Document, TantivyDocument, Term,
-};
+use sqlx::{Acquire, Connection, FromRow};
+use tantivy::schema::{OwnedValue, Value as _};
 use uuid::Uuid;

 use crate::{AppState, NodeId};

-use super::utils::owned_value_to_json_value;
+// use super::utils::owned_value_to_json_value;

 pub type ExtraData = BTreeMap<String, Value>;

@@ -43,24 +39,51 @@ impl AppState {
   /// Get all properties of a node
   pub async fn get_node(&self, node_id: impl AsRef<str>) -> Result<NodeInfo> {
     let node_id = node_id.as_ref().to_owned();
-    let tx = self.db.multi_transaction(false);
-    let result = tx.run_script(
-      "
-      ?[key, relation, field_name, type, is_fts_enabled] :=
-        *node_has_key { key, id },
-        *fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled },
-        id = $node_id
-      ",
-      btmap! {"node_id".to_owned() => node_id.to_string().into()},
-    )?;
-    let field_mapping = AppState::rows_to_field_mapping(result)?;
+    let conn = self.conn().await?;
+
+    #[derive(FromRow)]
+    struct FieldMappingRow {
+      full_key: String,
+      app_id: i64,
+      app_table_name: String,
+      app_table_field: String,
+    }
+
+    conn
+      .transaction(|tx| {
+        Box::pin(async move {
+          let result = sqlx::query_as!(
+            FieldMappingRow,
+            "
+            SELECT
+              node_has_key.full_key, app_id, app_table_name, app_table_field
+            FROM node_has_key
+            INNER JOIN full_key_to_db_key
+              ON node_has_key.full_key = full_key_to_db_key.full_key
+            WHERE node_id = $1
+            ",
+            node_id
+          )
+          .fetch_all(&mut **tx)
+          .await
+          .into_diagnostic()?;
+
+          let field_mapping = result
+            .into_iter()
+            .map(|row| (row.full_key.clone(), row))
+            .collect::<HashMap<_, _>>();

           // Group the keys by which relation they're in
-    let result_by_relation = field_mapping
-      .iter()
-      .into_group_map_by(|(_, FieldInfo { relation_name, .. })| relation_name);
+          let result_by_relation = field_mapping.iter().into_group_map_by(
+            |(
+              _,
+              FieldMappingRow {
+                app_id,
+                app_table_name,
+                ..
+              },
+            )| (app_id, app_table_name),
+          );

           let mut all_relation_queries = vec![];
           let mut all_relation_constraints = vec![];

@@ -144,7 +167,10 @@
           .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
         {
           for (value, (_, _, field_name)) in row {
-            fields.insert(field_name.to_string(), data_value_to_json_value(&value));
+            fields.insert(
+              field_name.to_string(),
+              data_value_to_json_value(&value),
+            );
           }
         }

@@ -154,322 +180,327 @@
             updated_at,
             fields: Some(fields),
           })
+        })
+      })
+      .await?;
+
+    Ok(())
   }
 }
+// #[derive(Debug)]
+// pub enum CreateOrUpdate {
+//   Create { r#type: String },
+//   Update { node_id: NodeId },
+// }
+
+// impl AppState {
+//   // TODO: Split this out into create and update
+//   pub async fn create_or_update_node(
+//     &self,
+//     opts: CreateOrUpdate,
+//     extra_data: Option<ExtraData>,
+//   ) -> Result<NodeInfo> {
+//     let node_id = match opts {
+//       CreateOrUpdate::Create { .. } => NodeId(Uuid::now_v7()),
+//       CreateOrUpdate::Update { ref node_id } => node_id.clone(),
+//     };
+//     let node_id = node_id.to_string();
+
+//     let action = match opts {
+//       CreateOrUpdate::Create { .. } => "put",
+//       CreateOrUpdate::Update { .. } => "update",
+//     };
+
+//     println!("Request: {opts:?} {extra_data:?}");
+
+//     let tx = self.db.multi_transaction(true);
+
+//     let (created_at, updated_at) = match opts {
+//       CreateOrUpdate::Create { ref r#type } => {
+//         let node_result = tx.run_script(
+//           "
+//           ?[id, type] <- [[$node_id, $type]]
+//           :put node { id, type }
+//           :returning
+//           ",
+//           btmap! {
+//             "node_id".to_owned() => DataValue::from(node_id.clone()),
+//             "type".to_owned() => DataValue::from(r#type.to_owned()),
+//           },
+//         )?;
+//         let created_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         let updated_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][4].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         (created_at, updated_at)
+//       }
+//       CreateOrUpdate::Update { .. } => {
+//         let node_result = tx.run_script(
+//           "
+//           ?[id, type, created_at, updated_at] := *node { id, type, created_at, updated_at },
+//           id = $node_id
+//           ",
+//           btmap! {
+//             "node_id".to_owned() => DataValue::from(node_id.clone()),
+//           },
+//         )?;
+//         let created_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         let updated_at = DateTime::from_timestamp_millis(
+//           (node_result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
+//         )
+//         .unwrap();
+//         (created_at, updated_at)
+//       }
+//     };
+
+//     if let Some(extra_data) = extra_data {
+//       let node_id_field = self
+//         .tantivy_field_map
+//         .get_by_left("node_id")
+//         .unwrap()
+//         .clone();
+
+//       if !extra_data.is_empty() {
+//         let keys = extra_data.keys().map(|s| s.to_owned()).collect::<Vec<_>>();
+//         let field_mapping =
+//           self.get_rows_for_extra_keys(&tx, keys.as_slice())?;
+
+//         // Group the keys by which relation they're in
+//         let result_by_relation = field_mapping.iter().into_group_map_by(
+//           |(_, FieldInfo { relation_name, .. })| relation_name,
+//         );
+
+//         for (relation, fields) in result_by_relation.iter() {
+//           let mut doc = btmap! { node_id_field.clone() => OwnedValue::Str(node_id.to_owned()) };
+//           let fields_mapping = fields
+//             .into_iter()
+//             .map(
+//               |(
+//                 key,
+//                 FieldInfo {
+//                   relation_field,
+//                   r#type,
+//                   is_fts_enabled,
+//                   ..
+//                 },
+//               )| {
+//                 let new_value = extra_data.get(*key).unwrap();
+//                 // TODO: Make this more generic
+//                 let new_value = match r#type.as_str() {
+//                   "int" => DataValue::from(new_value.as_i64().unwrap()),
+//                   _ => DataValue::from(new_value.as_str().unwrap()),
+//                 };
+
+//                 if *is_fts_enabled {
+//                   if let Some(field) = self.tantivy_field_map.get_by_left(*key)
+//                   {
+//                     doc.insert(
+//                       field.clone(),
+//                       OwnedValue::Str(new_value.get_str().unwrap().to_owned()),
+//                     );
+//                   }
+//                 }
+
+//                 (relation_field.to_owned(), new_value)
+//               },
+//             )
+//             .collect::<BTreeMap<_, _>>();
+
+//           let mut writer =
+//             self.tantivy_index.writer(15_000_000).into_diagnostic()?;
+//           let delete_term =
+//             Term::from_field_text(node_id_field.clone(), &node_id);
+//           writer.delete_term(delete_term);
+//           writer.add_document(doc).into_diagnostic()?;
+//           writer.commit().into_diagnostic()?;
+//           drop(writer);
+
+//           let keys = fields_mapping.keys().collect::<Vec<_>>();
+//           let keys_joined = keys.iter().join(", ");
+
+//           if !keys.is_empty() {
+//             let query = format!(
+//               "
+//               ?[ node_id, {keys_joined} ] <- [$input_data]
+//               :{action} {relation} {{ node_id, {keys_joined} }}
+//               "
+//             );
+
+//             let mut params = vec![];
+//             params.push(DataValue::from(node_id.clone()));
+//             for key in keys {
+//               params.push(fields_mapping[key].clone());
+//             }
+
+//             let result = tx.run_script(
+//               &query,
+//               btmap! {
+//                 "input_data".to_owned() => DataValue::List(params),
+//               },
+//             );
+//           }
+//         }
+
+//         let input = DataValue::List(
+//           keys
+//             .iter()
+//             .map(|s| {
+//               DataValue::List(vec![
+//                 DataValue::from(s.to_owned()),
+//                 DataValue::from(node_id.clone()),
+//               ])
+//             })
+//             .collect_vec(),
+//         );
+
+//         tx.run_script(
+//           "
+//           ?[key, id] <- $input_data
+//           :put node_has_key { key, id }
+//           ",
+//           btmap! {
+//             "input_data".to_owned() => input
+//           },
+//         )?;
+//       }
+//     }
+
+//     tx.commit()?;
+
+//     Ok(NodeInfo {
+//       node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
+//       created_at,
+//       updated_at,
+//       fields: None,
+//     })
+//   }
+
+//   pub async fn update_node() {}
+
+//   pub async fn search_nodes(
+//     &self,
+//     query: impl AsRef<str>,
+//   ) -> Result<Vec<(NodeId, Value)>> {
+//     let query = query.as_ref();
+//     let reader = self.tantivy_index.reader().into_diagnostic()?;
+//     let searcher = reader.searcher();
+//     let node_id_field = self
+//       .tantivy_field_map
+//       .get_by_left("node_id")
+//       .unwrap()
+//       .clone();
+//     let journal_page_field = self
+//       .tantivy_field_map
+//       .get_by_left("panorama/journal/page/content")
+//       .unwrap()
+//       .clone();
+
+//     let mut query_parser =
+//       QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
+//     query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
+//     let query = query_parser.parse_query(query).into_diagnostic()?;
+
+//     let top_docs = searcher
+//       .search(&query, &TopDocs::with_limit(10))
+//       .into_diagnostic()?;
+
+//     Ok(
+//       top_docs
+//         .into_iter()
+//         .map(|(score, doc_address)| {
+//           let retrieved_doc =
+//             searcher.doc::<TantivyDocument>(doc_address).unwrap();
+//           let node_id = retrieved_doc
+//             .get_first(node_id_field.clone())
+//             .unwrap()
+//             .as_str()
+//             .unwrap();
+//           let all_fields = retrieved_doc.get_sorted_field_values();
+//           let node_id = NodeId(Uuid::from_str(node_id).unwrap());
+//           let fields = all_fields
+//             .into_iter()
+//             .map(|(field, values)| {
+//               (
+//                 self.tantivy_field_map.get_by_right(&field).unwrap(),
+//                 if values.len() == 1 {
+//                   owned_value_to_json_value(values[0])
+//                 } else {
+//                   Value::Array(
+//                     values
+//                       .into_iter()
+//                       .map(owned_value_to_json_value)
+//                       .collect_vec(),
+//                   )
+//                 },
+//               )
+//             })
+//             .collect::<HashMap<_, _>>();
+//           (
+//             node_id,
+//             json!({
+//               "score": score,
+//               "fields": fields,
+//             }),
+//           )
+//         })
+//         .collect::<Vec<_>>(),
+//     )
+//   }
+
+//   fn get_rows_for_extra_keys(
+//     &self,
+//     tx: &MultiTransaction,
+//     keys: &[String],
+//   ) -> Result<FieldMapping> {
+//     let result = tx.run_script(
+//       "
+//       ?[key, relation, field_name, type, is_fts_enabled] :=
+//         *fqkey_to_dbkey{key, relation, field_name, type, is_fts_enabled},
+//         is_in(key, $keys)
+//       ",
+//       btmap! {
+//         "keys".to_owned() => DataValue::List(
+//           keys.into_iter()
+//             .map(|s| DataValue::from(s.as_str()))
+//             .collect::<Vec<_>>()
+//         ),
+//       },
+//     )?;
+
+//     AppState::rows_to_field_mapping(result)
+//   }
+
+//   fn rows_to_field_mapping(result: NamedRows) -> Result<FieldMapping> {
+//     let s = |s: &DataValue| s.get_str().unwrap().to_owned();
+//     Ok(
+//       result
+//         .rows
+//         .into_iter()
+//         .map(|row| {
+//           (
+//             s(&row[0]),
+//             FieldInfo {
+//               relation_name: s(&row[1]),
+//               relation_field: s(&row[2]),
+//               r#type: s(&row[3]),
+//               is_fts_enabled: row[4].get_bool().unwrap(),
+//             },
+//           )
+//         })
+//         .collect::<HashMap<_, _>>(),
+//     )
+//   }
+// }
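The rewritten get_node above wraps its queries in sqlx's Connection::transaction, which takes a closure returning a boxed future; the commented-out functions (create_or_update_node and friends, still written against the old Cozo-style run_script API) will presumably be ported to the same shape. A minimal, self-contained sketch of that pattern against the node tables from the new migration — put_node, the example key name, and the parameters are assumptions, not code from this commit:

use sqlx::{Connection, SqliteConnection};

// Insert a node and one of its key rows atomically: if either statement
// fails, the closure returns Err and the whole transaction is rolled back.
async fn put_node(
  conn: &mut SqliteConnection,
  node_id: String,
  node_type: String,
) -> Result<(), sqlx::Error> {
  conn
    .transaction(|tx| {
      Box::pin(async move {
        sqlx::query("INSERT INTO node (node_id, node_type) VALUES (?, ?)")
          .bind(&node_id)
          .bind(&node_type)
          .execute(&mut **tx)
          .await?;
        sqlx::query("INSERT INTO node_has_key (node_id, full_key) VALUES (?, ?)")
          .bind(&node_id)
          .bind("panorama/example/key")
          .execute(&mut **tx)
          .await?;
        Ok(())
      })
    })
    .await
}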