Compare commits

No commits in common. "2d424d763fdab99f8bd56af6e08f8ac5425d6e5e" and "e0b7ebedacb5874e93c467593414c71b1321d36d" have entirely different histories.

2d424d763f ... e0b7ebedac

Cargo.lock (generated; 1216 lines changed, diff not shown)

ui/.gitignore → app/.gitignore (vendored; 0 changes)

ui/package.json → app/package.json

@@ -1,5 +1,5 @@
 {
-  "name": "panorama",
+  "name": "panorama-app",
   "version": "0.1.0",
   "type": "module",
   "scripts": {

18 image assets renamed from ui/ to app/ (binary; sizes unchanged, 965 B to 50 KiB)

SearchBar.tsx

@@ -1,18 +1,20 @@
 import styles from "./SearchBar.module.scss";
 import {
+  FloatingFocusManager,
   FloatingOverlay,
   FloatingPortal,
   autoUpdate,
   offset,
+  useClick,
   useDismiss,
   useFloating,
   useFocus,
   useInteractions,
 } from "@floating-ui/react";
-import { useCallback, useEffect, useState } from "react";
+import { useDebounce } from "use-debounce";
+import { useEffect, useState } from "react";
 import { atom, useAtom, useSetAtom } from "jotai";
 import { useNodeControls } from "../App";
-import { useDebounce, useDebouncedCallback } from "use-debounce";
 
 const searchQueryAtom = atom("");
 const showMenuAtom = atom(false);

@@ -35,7 +37,8 @@ export default function SearchBar() {
     useDismiss(context),
   ]);
 
-  const performSearch = useCallback(() => {
+  useEffect(() => {
+    setSearchResults([]);
     const trimmed = searchQuery.trim();
     if (trimmed === "") return;
 

@@ -60,11 +63,7 @@ export default function SearchBar() {
           onFocus={() => setShowMenu(true)}
           ref={refs.setReference}
           value={searchQuery}
-          onChange={(evt) => {
-            setSearchQuery(evt.target.value);
-            if (evt.target.value) performSearch();
-            else setSearchResults([]);
-          }}
+          onChange={(evt) => setSearchQuery(evt.target.value)}
           {...getReferenceProps()}
         />
       </div>

JournalPage.tsx

@@ -7,15 +7,18 @@ import remarkMath from "remark-math";
 import rehypeKatex from "rehype-katex";
 import { parse as parseDate, format as formatDate } from "date-fns";
 import { useDebounce } from "use-debounce";
-import {
-  JOURNAL_PAGE_CONTENT_FIELD_NAME,
-  JOURNAL_PAGE_TITLE_FIELD_NAME,
-  NodeInfo,
-} from "../../lib/data";
+const JOURNAL_PAGE_CONTENT_FIELD_NAME = "panorama/journal/page/content";
+const JOURNAL_PAGE_TITLE_FIELD_NAME = "panorama/journal/page/title";
 
 export interface JournalPageProps {
   id: string;
-  data: NodeInfo;
+  data: {
+    day?: string;
+    title?: string;
+    content: string;
+    fields: object;
+  };
 }
 
 export default function JournalPage({ id, data }: JournalPageProps) {

ui/src/vite-env.d.ts → app/src/vite-env.d.ts (vendored; 0 changes)

Cargo.toml (panorama-core)

@@ -7,12 +7,12 @@ edition = "2021"
 backoff = { version = "0.4.0", features = ["tokio"] }
 bimap = "0.6.3"
 chrono = { version = "0.4.38", features = ["serde"] }
+cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
 futures = "0.3.30"
 itertools = "0.13.0"
 miette = { version = "5.5.0", features = ["fancy", "backtrace"] }
 serde = { version = "1.0.203", features = ["derive"] }
 serde_json = "1.0.117"
-sqlx = { version = "0.7.4", features = ["runtime-tokio", "tls-rustls", "macros", "sqlite", "uuid", "chrono", "regexp"] }
 sugars = "3.0.1"
 tantivy = { version = "0.22.0", features = ["zstd"] }
 tokio = { version = "1.38.0", features = ["full"] }

build.rs (panorama-core, deleted)

@@ -1,3 +0,0 @@
-fn main() {
-  println!("cargo:rerun-if-changed=migrations");
-}

SQL migration (panorama-core, deleted)

@@ -1,6 +0,0 @@
-CREATE TABLE "node" (
-  id TEXT PRIMARY KEY,
-  type TEXT,
-  updated_at DATETIME DEFAULT NOW(),
-  extra_data JSON
-);

lib.rs (panorama-core)

@@ -8,8 +8,7 @@ extern crate sugars;
 pub mod migrations;
 pub mod state;
 
-// pub mod mail;
-pub mod messaging;
+pub mod mail;
 #[cfg(test)]
 mod tests;
 

mail.rs (panorama-core)

@@ -1,291 +1,160 @@
-use std::{
-  collections::{HashMap, HashSet},
-  time::Duration,
-};
-
-use async_imap::Session;
-use backoff::{exponential::ExponentialBackoff, SystemClock};
-use futures::TryStreamExt;
-use itertools::Itertools;
-use miette::{Context, IntoDiagnostic, Result};
-use tokio::{net::TcpStream, time::sleep};
-use uuid::Uuid;
-
-use crate::{mail, AppState};
-
-pub struct MailWorker {
-  state: AppState,
-}
-
-impl MailWorker {
-  pub fn new(state: AppState) -> MailWorker {
-    MailWorker { state }
-  }
-
-  pub async fn mail_loop(self) -> Result<()> {
-    loop {
-      let mut policy = ExponentialBackoff::<SystemClock>::default();
-      policy.current_interval = Duration::from_secs(5);
-      policy.initial_interval = Duration::from_secs(5);
-
-      backoff::future::retry(policy, || async {
-        match self.mail_loop_inner().await {
-          Ok(_) => {}
-          Err(err) => {
-            eprintln!("Mail error: {:?}", err);
-            Err(err)?;
-          }
-        }
-        // For now, just sleep 30 seconds and then fetch again
-        // TODO: Run a bunch of connections at once and do IDLE over them (if possible)
-        sleep(Duration::from_secs(30)).await;
-
-        Ok(())
-      })
-      .await?;
-    }
-
-    Ok(())
-  }
-
-  async fn mail_loop_inner(&self) -> Result<()> {
-    // Fetch the mail configs
-    let configs = self.state.fetch_mail_configs()?;
-    if configs.is_empty() {
-      return Ok(());
-    }
-
-    // TODO: Do all configs instead of just the first
-    let config = &configs[0];
-
-    let stream =
-      TcpStream::connect((config.imap_hostname.as_str(), config.imap_port))
-        .await
-        .into_diagnostic()?;
-
-    let client = async_imap::Client::new(stream);
-    let mut session = client
-      .login(&config.imap_username, &config.imap_password)
-      .await
-      .map_err(|(err, _)| err)
-      .into_diagnostic()?;
-
-    let all_mailbox_ids = self
-      .fetch_and_store_all_mailboxes(config.node_id.to_string(), &mut session)
-      .await
-      .context("Could not fetch mailboxes")?;
-
-    self
-      .fetch_all_mail_from_single_mailbox(
-        &mut session,
-        &all_mailbox_ids,
-        config.node_id.to_string(),
-        "INBOX",
-      )
-      .await
-      .context("Could not fetch mail from INBOX")?;
-
-    session.logout().await.into_diagnostic()?;
-
-    Ok(())
-  }
-
-  async fn fetch_and_store_all_mailboxes(
-    &self,
-    config_node_id: String,
-    session: &mut Session<TcpStream>,
-  ) -> Result<HashMap<String, String>> {
-    // println!("Session: {:?}", session);
-    let mailboxes = session
-      .list(None, Some("*"))
-      .await
-      .into_diagnostic()?
-      .try_collect::<Vec<_>>()
-      .await
-      .into_diagnostic()?;
-
-    let mut all_mailboxes = HashMap::new();
-
-    // TODO: Make this more efficient by using bulk in query
-
-    for mailbox in mailboxes {
-      let tx = self.state.db.multi_transaction(true);
-
-      let result = tx.run_script(
-        "
-        ?[node_id] :=
-          *mailbox{node_id, account_node_id, mailbox_name},
-          account_node_id = $account_node_id,
-          mailbox_name = $mailbox_name,
-        ",
-        btmap! {
-          "account_node_id".to_owned()=>DataValue::from(config_node_id.clone()),
-          "mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
-        },
-      )?;
-
-      let node_id = if result.rows.len() == 0 {
-        let new_node_id = Uuid::now_v7();
-        let new_node_id = new_node_id.to_string();
-        let extra_data = json!({
-          "name": mailbox.name(),
-        });
-        tx.run_script("
-          ?[node_id, account_node_id, mailbox_name, extra_data] <-
-            [[$new_node_id, $account_node_id, $mailbox_name, $extra_data]]
-          :put mailbox { node_id, account_node_id, mailbox_name, extra_data }
-          ",
-          btmap! {
-            "new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
-            "account_node_id".to_owned() => DataValue::from(config_node_id.clone()),
-            "mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
-            "extra_data".to_owned()=>DataValue::Json(JsonData(extra_data)),
-          },
-        )?;
-        new_node_id
-      } else {
-        result.rows[0][0].get_str().unwrap().to_owned()
-      };
-
-      tx.commit()?;
-
-      all_mailboxes.insert(mailbox.name().to_owned(), node_id);
-    }
-
-    // println!("All mailboxes: {:?}", all_mailboxes);
-
-    Ok(all_mailboxes)
-  }
-
-  async fn fetch_all_mail_from_single_mailbox(
-    &self,
-    session: &mut Session<TcpStream>,
-    all_mailbox_ids: &HashMap<String, String>,
-    config_node_id: String,
-    mailbox_name: impl AsRef<str>,
-  ) -> Result<()> {
-    let mailbox_name = mailbox_name.as_ref();
-    let mailbox = session.select(mailbox_name).await.into_diagnostic()?;
-    let mailbox_node_id = all_mailbox_ids.get(mailbox_name).unwrap();
-
-    let extra_data = json!({
-      "uid_validity": mailbox.uid_validity,
-      "last_seen": mailbox.unseen,
-    });
-
-    // TODO: Validate uid validity here
-
-    let all_uids = session
-      .uid_search("ALL")
-      .await
-      .into_diagnostic()
-      .context("Could not fetch all UIDs")?;
-
-    println!("All UIDs ({}): {:?}", all_uids.len(), all_uids);
-
-    let messages = session
-      .uid_fetch(
-        all_uids.iter().join(","),
-        "(FLAGS ENVELOPE BODY[HEADER] BODY[TEXT] INTERNALDATE)",
-      )
-      .await
-      .into_diagnostic()?
-      .try_collect::<Vec<_>>()
-      .await
-      .into_diagnostic()
-      .context("Could not fetch messages")?;
-    println!(
-      "messages {:?}",
-      messages.iter().map(|f| f.body()).collect::<Vec<_>>()
-    );
-
-    let mut unique_message_ids = HashSet::new();
-    let data: Vec<_> = messages
-      .iter()
-      .map(|msg| {
-        let message_node_id = Uuid::now_v7();
-        let headers =
-          String::from_utf8(msg.header().unwrap().to_vec()).unwrap();
-        let headers = headers
-          .split("\r\n")
-          .filter_map(|s| {
-            // This is really bad lmao
-            let p = s.split(": ").collect::<Vec<_>>();
-            if p.len() < 2 {
-              None
-            } else {
-              Some((p[0], p[1..].join(": ")))
-            }
-          })
-          .collect::<HashMap<_, _>>();
-
-        let message_id = headers
-          .get("Message-ID")
-          .map(|s| (*s).to_owned())
-          .unwrap_or(message_node_id.to_string());
-        unique_message_ids.insert(message_id.clone());
-
-        DataValue::List(vec![
-          DataValue::from(message_node_id.to_string()),
-          DataValue::from(config_node_id.to_string()),
-          DataValue::from(mailbox_node_id.clone()),
-          DataValue::from(
-            headers
-              .get("Subject")
-              .map(|s| (*s).to_owned())
-              .unwrap_or("Subject".to_owned()),
-          ),
-          DataValue::Json(JsonData(serde_json::to_value(&headers).unwrap())),
-          DataValue::Bytes(msg.text().unwrap().to_vec()),
-          DataValue::from(msg.internal_date().unwrap().to_rfc3339()),
-          DataValue::from(message_id),
-        ])
-      })
-      .collect();
-
-    println!("Adding {} messages to database...", data.len());
-    let input_data = DataValue::List(data);
-
-    // TODO: Can this be one query?
-    let tx = self.state.db.multi_transaction(true);
-
-    let unique_message_ids_data_value = DataValue::List(
-      unique_message_ids
-        .into_iter()
-        .map(|s| DataValue::from(s))
-        .collect_vec(),
-    );
-
-    let existing_ids = tx.run_script(
-      "
-      ?[node_id] := *message { node_id, message_id },
-        is_in(message_id, $message_ids)
-      ",
-      btmap! { "message_ids".to_owned() => unique_message_ids_data_value },
-    )?;
-    println!("Existing ids: {:?}", existing_ids);
-
-    self
-      .state
-      .db
-      .run_script(
-        "
-        ?[
-          node_id, account_node_id, mailbox_node_id, subject, headers, body,
-          internal_date, message_id
-        ] <- $input_data
-        :put message {
-          node_id, account_node_id, mailbox_node_id, subject, headers, body,
-          internal_date, message_id
-        }
-        ",
-        btmap! {
-          "input_data".to_owned() => input_data,
-        },
-        ScriptMutability::Mutable,
-      )
-      .context("Could not add message to database")?;
-
-    Ok(())
-  }
-}
+use std::{collections::HashMap, time::Duration};
+
+use backoff::{exponential::ExponentialBackoff, SystemClock};
+use cozo::{DataValue, JsonData, ScriptMutability};
+use futures::TryStreamExt;
+use miette::{IntoDiagnostic, Result};
+use tokio::{net::TcpStream, time::sleep};
+use uuid::Uuid;
+
+use crate::AppState;
+
+pub async fn mail_loop(state: AppState) -> Result<()> {
+  backoff::future::retry(
+    ExponentialBackoff::<SystemClock>::default(),
+    || async {
+      mail_loop_inner(&state).await?;
+
+      // For now, just sleep 30 seconds and then fetch again
+      // TODO: Run a bunch of connections at once and do IDLE over them (if possible)
+      sleep(Duration::from_secs(30)).await;
+
+      Ok(())
+    },
+  )
+  .await?;
+
+  Ok(())
+}
+
+async fn mail_loop_inner(state: &AppState) -> Result<()> {
+  // Fetch the mail configs
+  let configs = state.fetch_mail_configs()?;
+  if configs.len() == 0 {
+    return Ok(());
+  }
+
+  // TODO: Do all configs instead of just the first
+  let config = &configs[0];
+
+  let stream =
+    TcpStream::connect((config.imap_hostname.as_str(), config.imap_port))
+      .await
+      .into_diagnostic()?;
+
+  let client = async_imap::Client::new(stream);
+  let mut session = client
+    .login(&config.imap_username, &config.imap_password)
+    .await
+    .map_err(|(err, _)| err)
+    .into_diagnostic()?;
+
+  // println!("Session: {:?}", session);
+  let mailboxes = session
+    .list(None, Some("*"))
+    .await
+    .into_diagnostic()?
+    .try_collect::<Vec<_>>()
+    .await
+    .into_diagnostic()?;
+  let mailbox_names =
+    mailboxes.iter().map(|name| name.name()).collect::<Vec<_>>();
+  println!("mailboxes: {mailbox_names:?}");
+
+  // Get the mailbox with INBOX
+  let inbox_node_id = {
+    let result = state.db.run_script("
+      ?[node_id] :=
+        *mailbox{node_id, account_node_id, mailbox_name},
+        account_node_id = $account_node_id,
+        mailbox_name = 'INBOX'
+    ", btmap! {"account_node_id".to_owned()=>DataValue::from(config.node_id.to_string())}, ScriptMutability::Immutable)?;
+
+    if result.rows.len() == 0 {
+      let new_node_id = Uuid::now_v7();
+      let new_node_id = new_node_id.to_string();
+      state.db.run_script("
+        ?[node_id, account_node_id, mailbox_name] <-
+          [[$new_node_id, $account_node_id, 'INBOX']]
+        :put mailbox { node_id, account_node_id, mailbox_name }
+      ",
+      btmap! {
+        "new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
+        "account_node_id".to_owned() => DataValue::from(config.node_id.to_string()),
+      },
+      ScriptMutability::Mutable)?;
+      new_node_id
+    } else {
+      result.rows[0][0].get_str().unwrap().to_owned()
+    }
+  };
+  println!("INBOX: {:?}", inbox_node_id);
+
+  let inbox = session.select("INBOX").await.into_diagnostic()?;
+  println!("last unseen: {:?}", inbox.unseen);
+
+  let messages = session
+    .fetch(
+      "1:4",
+      "(FLAGS ENVELOPE BODY[HEADER] BODY[TEXT] INTERNALDATE)",
+    )
+    .await
+    .into_diagnostic()?
+    .try_collect::<Vec<_>>()
+    .await
+    .into_diagnostic()?;
+  println!(
+    "messages {:?}",
+    messages.iter().map(|f| f.body()).collect::<Vec<_>>()
+  );
+
+  let input_data = DataValue::List(
+    messages
+      .iter()
+      .map(|msg| {
+        let message_id = Uuid::now_v7();
+        let headers =
+          String::from_utf8(msg.header().unwrap().to_vec()).unwrap();
+        let headers = headers
+          .split("\r\n")
+          .filter_map(|s| {
+            let p = s.split(": ").collect::<Vec<_>>();
+            if p.len() < 2 {
+              None
+            } else {
+              Some((p[0], p[1]))
+            }
+          })
+          .collect::<HashMap<_, _>>();
+        DataValue::List(vec![
+          DataValue::from(message_id.to_string()),
+          DataValue::from(config.node_id.to_string()),
+          DataValue::from(inbox_node_id.clone()),
+          DataValue::from(
+            headers
+              .get("Subject")
+              .map(|s| (*s).to_owned())
+              .unwrap_or("Subject".to_owned()),
+          ),
+          DataValue::Json(JsonData(serde_json::to_value(headers).unwrap())),
+          DataValue::Bytes(msg.text().unwrap().to_vec()),
+          DataValue::from(msg.internal_date().unwrap().to_rfc3339()),
+        ])
+      })
+      .collect(),
+  );
+
+  state.db.run_script(
+    "
+    ?[node_id, account_node_id, mailbox_node_id, subject, headers, body, internal_date] <- $input_data
+    :put message { node_id, account_node_id, mailbox_node_id, subject, headers, body, internal_date }
+    ",
+    btmap! {
+      "input_data".to_owned() => input_data,
+    },
+    ScriptMutability::Mutable,
+  )?;
+
+  session.logout().await.into_diagnostic()?;
+
+  Ok(())
+}

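A note on the control-flow change above: the old MailWorker::mail_loop hand-rolled a loop around backoff::future::retry with a tuned policy, while the new mail_loop passes a default ExponentialBackoff and keeps the polling sleep inside the closure. A minimal sketch of that retry pattern, under the backoff 0.4 futures API this Cargo.toml pulls in (fetch_once is a hypothetical stand-in for mail_loop_inner):

  use backoff::{exponential::ExponentialBackoff, SystemClock};

  // Hypothetical stand-in for mail_loop_inner.
  async fn fetch_once() -> Result<(), std::io::Error> {
    Ok(())
  }

  async fn fetch_with_retry() -> Result<(), std::io::Error> {
    backoff::future::retry(
      ExponentialBackoff::<SystemClock>::default(),
      || async {
        // `?` converts the inner error into a transient backoff::Error, so
        // retry() re-runs the closure after the next backoff interval.
        fetch_once().await?;
        Ok(())
      },
    )
    .await
  }

retry() resolves as soon as the closure returns Ok, so in the new code the 30-second sleep delays only the successful pass; the new version relies on that single retry future rather than an outer loop.
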
messaging.rs (panorama-core, deleted)

@@ -1,4 +0,0 @@
-//! Panorama uses an internal messaging system to pass content around
-//!
-//! This implementation is dead simple, just passes all messages and filters on the other end
-pub struct Messaging {}

migrations.rs (panorama-core)

@@ -1,200 +1,196 @@
+use cozo::DbInstance;
 use miette::{IntoDiagnostic, Result};
-use sqlx::migrate::Migrator;
 
 use serde_json::Value;
 
 use crate::ensure_ok;
 
-pub static MIGRATOR: Migrator = sqlx::migrate!();
-
-// pub async fn run_migrations(db: &DbInstance) -> Result<()> {
-//   let migration_status = check_migration_status(db).await?;
-//   println!("migration status: {:?}", migration_status);
-
-//   let migrations: Vec<Box<dyn for<'a> Fn(&'a DbInstance) -> Result<()>>> =
-//     vec![Box::new(no_op), Box::new(migration_01)];
-
-//   if let MigrationStatus::NoMigrations = migration_status {
-//     let result = db.run_script_str(
-//       "
-//       { :create migrations { yeah: Int default 0 => version: Int default 0 } }
-//       {
-//         ?[yeah, version] <- [[0, 0]]
-//         :put migrations { yeah, version }
-//       }
-//       ",
-//       "",
-//       false,
-//     );
-//     ensure_ok(&result)?;
-//   }
-
-//   let start_at_migration = match migration_status {
-//     MigrationStatus::NoMigrations => 0,
-//     MigrationStatus::HasVersion(n) => n,
-//   };
-//   let migrations_to_run = migrations
-//     .iter()
-//     .enumerate()
-//     .skip(start_at_migration as usize + 1);
-//   // println!("running {} migrations...", migrations_to_run.len());
-
-//   //TODO: This should all be done in a transaction
-//   for (idx, migration) in migrations_to_run {
-//     println!("running migration {idx}...");
-
-//     migration(db)?;
-
-//     let result = db.run_script_str(
-//       "
-//       ?[yeah, version] <- [[0, $version]]
-//       :put migrations { yeah => version }
-//       ",
-//       &format!("{{\"version\":{}}}", idx),
-//       false,
-//     );
-
-//     ensure_ok(&result)?;
-
-//     println!("succeeded migration {idx}!");
-//   }
-
-//   Ok(())
-// }
-
-// #[derive(Debug)]
-// enum MigrationStatus {
-//   NoMigrations,
-//   HasVersion(u64),
-// }
-
-// async fn check_migration_status(db: &DbInstance) -> Result<MigrationStatus> {
-//   let status = db.run_script_str(
-//     "
-//     ?[yeah, version] := *migrations[yeah, version]
-//     ",
-//     "",
-//     true,
-//   );
-//   println!("Status: {}", status);
-
-//   let status: Value = serde_json::from_str(&status).into_diagnostic()?;
-//   let status = status.as_object().unwrap();
-//   let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
-//   if !ok {
-//     let status_code = status.get("code").unwrap().as_str().unwrap();
-//     if status_code == "query::relation_not_found" {
-//       return Ok(MigrationStatus::NoMigrations);
-//     }
-//   }
-
-//   let rows = status.get("rows").unwrap().as_array().unwrap();
-//   let row = rows[0].as_array().unwrap();
-//   let version = row[1].as_number().unwrap().as_u64().unwrap();
-//   println!("row: {row:?}");
-
-//   Ok(MigrationStatus::HasVersion(version))
-// }
-
-// fn no_op(_: &DbInstance) -> Result<()> {
-//   Ok(())
-// }
-
-// fn migration_01(db: &DbInstance) -> Result<()> {
-//   let result = db.run_script_str(
-//     "
-//     # Primary node type
-//     {
-//       :create node {
-//         id: String
-//         =>
-//         type: String,
-//         created_at: Float default now(),
-//         updated_at: Float default now(),
-//         extra_data: Json default {},
-//       }
-//     }
-
-//     # Inverse mappings for easy querying
-//     { :create node_has_key { key: String => id: String } }
-//     { ::index create node_has_key:inverse { id } }
-//     { :create node_managed_by_app { node_id: String => app: String } }
-//     { :create node_refers_to { node_id: String => other_node_id: String } }
-//     {
-//       :create fqkey_to_dbkey {
-//         key: String
-//         =>
-//         relation: String,
-//         field_name: String,
-//         type: String,
-//         is_fts_enabled: Bool,
-//       }
-//     }
-//     {
-//       ?[key, relation, field_name, type, is_fts_enabled] <- [
-//         ['panorama/journal/page/day', 'journal_day', 'day', 'string', false],
-//         ['panorama/journal/page/title', 'journal', 'title', 'string', true],
-//         ['panorama/journal/page/content', 'journal', 'content', 'string', true],
-//         ['panorama/mail/config/imap_hostname', 'mail_config', 'imap_hostname', 'string', false],
-//         ['panorama/mail/config/imap_port', 'mail_config', 'imap_port', 'int', false],
-//         ['panorama/mail/config/imap_username', 'mail_config', 'imap_username', 'string', false],
-//         ['panorama/mail/config/imap_password', 'mail_config', 'imap_password', 'string', false],
-//         ['panorama/mail/message/body', 'message', 'body', 'string', true],
-//         ['panorama/mail/message/subject', 'message', 'subject', 'string', true],
-//         ['panorama/mail/message/message_id', 'message', 'message_id', 'string', false],
-//       ]
-//       :put fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled }
-//     }
-
-//     # Create journal type
-//     { :create journal { node_id: String => title: String default '', content: String } }
-//     { :create journal_day { day: String => node_id: String } }
-
-//     # Mail
-//     {
-//       :create mail_config {
-//         node_id: String
-//         =>
-//         imap_hostname: String,
-//         imap_port: Int,
-//         imap_username: String,
-//         imap_password: String,
-//       }
-//     }
-//     {
-//       :create mailbox {
-//         node_id: String
-//         =>
-//         account_node_id: String,
-//         mailbox_name: String,
-//         uid_validity: Int? default null,
-//         extra_data: Json default {},
-//       }
-//     }
-//     { ::index create mailbox:by_account_id_and_name { account_node_id, mailbox_name } }
-//     {
-//       :create message {
-//         node_id: String
-//         =>
-//         message_id: String,
-//         account_node_id: String,
-//         mailbox_node_id: String,
-//         subject: String,
-//         headers: Json?,
-//         body: Bytes,
-//         internal_date: String,
-//       }
-//     }
-//     { ::index create message:message_id { message_id } }
-//     { ::index create message:date { internal_date } }
-//     { ::index create message:by_mailbox_id { mailbox_node_id } }
-
-//     # Calendar
-//     ",
-//     "",
-//     false,
-//   );
-//   ensure_ok(&result)?;
-
-//   Ok(())
-// }
+pub async fn run_migrations(db: &DbInstance) -> Result<()> {
+  let migration_status = check_migration_status(db).await?;
+  println!("migration status: {:?}", migration_status);
+
+  let migrations: Vec<Box<dyn for<'a> Fn(&'a DbInstance) -> Result<()>>> =
+    vec![Box::new(no_op), Box::new(migration_01)];
+
+  if let MigrationStatus::NoMigrations = migration_status {
+    let result = db.run_script_str(
+      "
+      { :create migrations { yeah: Int default 0 => version: Int default 0 } }
+      {
+        ?[yeah, version] <- [[0, 0]]
+        :put migrations { yeah, version }
+      }
+      ",
+      "",
+      false,
+    );
+    ensure_ok(&result)?;
+  }
+
+  let start_at_migration = match migration_status {
+    MigrationStatus::NoMigrations => 0,
+    MigrationStatus::HasVersion(n) => n,
+  };
+  let migrations_to_run = migrations
+    .iter()
+    .enumerate()
+    .skip(start_at_migration as usize + 1);
+  // println!("running {} migrations...", migrations_to_run.len());
+
+  //TODO: This should all be done in a transaction
+  for (idx, migration) in migrations_to_run {
+    println!("running migration {idx}...");
+
+    migration(db)?;
+
+    let result = db.run_script_str(
+      "
+      ?[yeah, version] <- [[0, $version]]
+      :put migrations { yeah => version }
+      ",
+      &format!("{{\"version\":{}}}", idx),
+      false,
+    );
+
+    ensure_ok(&result)?;
+
+    println!("succeeded migration {idx}!");
+  }
+
+  Ok(())
+}
+
+#[derive(Debug)]
+enum MigrationStatus {
+  NoMigrations,
+  HasVersion(u64),
+}
+
+async fn check_migration_status(db: &DbInstance) -> Result<MigrationStatus> {
+  let status = db.run_script_str(
+    "
+    ?[yeah, version] := *migrations[yeah, version]
+    ",
+    "",
+    true,
+  );
+  println!("Status: {}", status);
+
+  let status: Value = serde_json::from_str(&status).into_diagnostic()?;
+  let status = status.as_object().unwrap();
+  let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
+  if !ok {
+    let status_code = status.get("code").unwrap().as_str().unwrap();
+    if status_code == "query::relation_not_found" {
+      return Ok(MigrationStatus::NoMigrations);
+    }
+  }
+
+  let rows = status.get("rows").unwrap().as_array().unwrap();
+  let row = rows[0].as_array().unwrap();
+  let version = row[1].as_number().unwrap().as_u64().unwrap();
+  println!("row: {row:?}");
+
+  Ok(MigrationStatus::HasVersion(version))
+}
+
+fn no_op(_: &DbInstance) -> Result<()> {
+  Ok(())
+}
+
+fn migration_01(db: &DbInstance) -> Result<()> {
+  let result = db.run_script_str(
+    "
+    # Primary node type
+    {
+      :create node {
+        id: String
+        =>
+        type: String,
+        created_at: Float default now(),
+        updated_at: Float default now(),
+        extra_data: Json default {},
+      }
+    }
+
+    # Inverse mappings for easy querying
+    { :create node_has_key { key: String => id: String } }
+    { ::index create node_has_key:inverse { id } }
+    { :create node_managed_by_app { node_id: String => app: String } }
+    { :create node_refers_to { node_id: String => other_node_id: String } }
+    {
+      :create fqkey_to_dbkey {
+        key: String
+        =>
+        relation: String,
+        field_name: String,
+        type: String,
+        is_fts_enabled: Bool,
+      }
+    }
+    {
+      ?[key, relation, field_name, type, is_fts_enabled] <- [
+        ['panorama/journal/page/day', 'journal_day', 'day', 'string', false],
+        ['panorama/journal/page/title', 'journal', 'title', 'string', true],
+        ['panorama/journal/page/content', 'journal', 'content', 'string', true],
+        ['panorama/mail/config/imap_hostname', 'mail_config', 'imap_hostname', 'string', false],
+        ['panorama/mail/config/imap_port', 'mail_config', 'imap_port', 'int', false],
+        ['panorama/mail/config/imap_username', 'mail_config', 'imap_username', 'string', false],
+        ['panorama/mail/config/imap_password', 'mail_config', 'imap_password', 'string', false],
+        ['panorama/mail/message/body', 'message', 'body', 'string', true],
+        ['panorama/mail/message/subject', 'message', 'subject', 'string', true],
+        ['panorama/mail/message/message_id', 'message', 'message_id', 'string', false],
+      ]
+      :put fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled }
+    }
+
+    # Create journal type
+    { :create journal { node_id: String => title: String default '', content: String } }
+    { :create journal_day { day: String => node_id: String } }
+
+    # Mail
+    {
+      :create mail_config {
+        node_id: String
+        =>
+        imap_hostname: String,
+        imap_port: Int,
+        imap_username: String,
+        imap_password: String,
+      }
+    }
+    {
+      :create mailbox {
+        node_id: String
+        =>
+        account_node_id: String,
+        mailbox_name: String,
+      }
+    }
+    { ::index create mailbox:by_account_id_and_name { account_node_id, mailbox_name } }
+    {
+      :create message {
+        node_id: String
+        =>
+        message_id: String,
+        account_node_id: String,
+        mailbox_node_id: String,
+        subject: String,
+        headers: Json?,
+        body: Bytes,
+        internal_date: String,
+      }
+    }
+    { ::index create message:message_id { message_id } }
+    { ::index create message:date { internal_date } }
+    { ::index create message:by_mailbox_id { mailbox_node_id } }
+
+    # Calendar
+    ",
+    "",
+    false,
+  );
+  ensure_ok(&result)?;
+
+  Ok(())
+}

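The cozo-backed run_migrations above replaces sqlx's static MIGRATOR with a hand-rolled versioning scheme: the highest applied index is persisted in a migrations relation, and on startup everything at or below it is skipped (index 0 is the no_op placeholder, which is never executed). A minimal sketch of that control flow, with a hypothetical Db type standing in for cozo::DbInstance:

  // Ordered migration list; `applied` is the version read back from the
  // database, or None when the migrations relation does not exist yet.
  struct Db;
  type Migration = fn(&Db) -> Result<(), String>;

  fn run_pending(db: &Db, applied: Option<u64>, migrations: &[Migration]) -> Result<(), String> {
    let start = applied.map(|v| v as usize + 1).unwrap_or(1);
    for (idx, migration) in migrations.iter().enumerate().skip(start) {
      migration(db)?;
      // run_migrations persists `idx` back to the migrations relation here,
      // so a crash between steps re-runs only the unfinished migration.
    }
    Ok(())
  }

The diff's version skips start_at_migration + 1 in both branches, which is why a fresh database still executes migration_01 (index 1) but never the index-0 no_op.
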
(deleted file)

@@ -1,3 +0,0 @@
-use crate::AppState;
-
-impl AppState {}

state/export.rs (panorama-core)

@@ -47,10 +47,6 @@ impl AppState {
 
     let mut all_relations = hmap! {};
     for relation_name in relation_names.iter() {
-      if relation_name.contains(":") {
-        continue;
-      }
-
       let mut relation_info = vec![];
 
       let columns = relation_columns.get(relation_name.as_str()).unwrap();

state/journal.rs (panorama-core)

@@ -1,7 +1,7 @@
 use std::str::FromStr;
 
 use chrono::Local;
-// use cozo::ScriptMutability;
+use cozo::ScriptMutability;
 use miette::{IntoDiagnostic, Result};
 use uuid::Uuid;
 

state/mod.rs (panorama-core)

@@ -1,28 +1,21 @@
-// pub mod codetrack;
-// pub mod export;
-// pub mod journal;
-// pub mod mail;
-// pub mod node;
-// pub mod utils;
+pub mod export;
+pub mod journal;
+pub mod mail;
+pub mod node;
+pub mod utils;
 
 use std::{collections::HashMap, fs, path::Path};
 
 use bimap::BiMap;
-use miette::{Context, IntoDiagnostic, Result};
-use sqlx::{
-  sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
-  SqlitePool,
-};
+use cozo::DbInstance;
+use miette::{IntoDiagnostic, Result};
 use tantivy::{
   directory::MmapDirectory,
   schema::{Field, Schema, STORED, STRING, TEXT},
   Index,
 };
 
-use crate::{
-  // mail::MailWorker,
-  migrations::{self, MIGRATOR},
-};
+use crate::{mail::mail_loop, migrations::run_migrations};
 
 pub fn tantivy_schema() -> (Schema, BiMap<String, Field>) {
   let mut schema_builder = Schema::builder();

@@ -40,7 +33,7 @@ pub fn tantivy_schema() -> (Schema, BiMap<String, Field>) {
 
 #[derive(Clone)]
 pub struct AppState {
-  pub db: SqlitePool,
+  pub db: DbInstance,
   pub tantivy_index: Index,
   pub tantivy_field_map: BiMap<String, Field>,
 }

@@ -48,10 +41,6 @@ pub struct AppState {
 impl AppState {
   pub async fn new(panorama_dir: impl AsRef<Path>) -> Result<Self> {
     let panorama_dir = panorama_dir.as_ref().to_path_buf();
-    fs::create_dir_all(&panorama_dir)
-      .into_diagnostic()
-      .context("Could not create panorama directory")?;
 
     println!("Panorama dir: {}", panorama_dir.display());
 
     let (tantivy_index, tantivy_field_map) = {

@@ -67,14 +56,12 @@ impl AppState {
     };
 
     let db_path = panorama_dir.join("db.sqlite");
-    let sqlite_connect_options = SqliteConnectOptions::new()
-      .filename(db_path)
-      .journal_mode(SqliteJournalMode::Wal);
-    let db = SqlitePoolOptions::new()
-      .connect_with(sqlite_connect_options)
-      .await
-      .into_diagnostic()
-      .context("Could not connect to SQLite database")?;
+    let db = DbInstance::new(
+      "sqlite",
+      db_path.display().to_string(),
+      Default::default(),
+    )
+    .unwrap();
 
     let state = AppState {
       db,

@@ -87,16 +74,10 @@ impl AppState {
   }
 
   async fn init(&self) -> Result<()> {
-    // run_migrations(&self.db).await?;
-    MIGRATOR
-      .run(&self.db)
-      .await
-      .into_diagnostic()
-      .context("Could not migrate database")?;
+    run_migrations(&self.db).await?;
 
-    // let state = self.clone();
-    // let mail_worker = MailWorker::new(state);
-    // tokio::spawn(mail_worker.mail_loop());
+    let state = self.clone();
+    tokio::spawn(async move { mail_loop(state).await });
 
     Ok(())
   }

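AppState now opens its database through cozo's embedded DbInstance instead of an sqlx SqlitePool. A minimal sketch of that usage, mirroring the calls visible in this diff (engine name "sqlite", path passed as a string, empty options via Default::default()):

  use cozo::{DbInstance, ScriptMutability};
  use miette::Result;

  fn open_and_query() -> Result<()> {
    // Same constructor shape as in AppState::new above.
    let db = DbInstance::new("sqlite", "db.sqlite", Default::default()).unwrap();
    // A parameterless CozoScript query; rows come back as NamedRows.
    let result = db.run_script(
      "?[x] <- [[1], [2]]",
      Default::default(),
      ScriptMutability::Immutable,
    )?;
    println!("{:?}", result.rows);
    Ok(())
  }
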
state/node.rs (panorama-core)

@@ -4,6 +4,7 @@ use std::{
 };
 
 use chrono::{DateTime, Utc};
+use cozo::{DataValue, MultiTransaction, NamedRows};
 use itertools::Itertools;
 use miette::{bail, IntoDiagnostic, Result};
 use serde_json::Value;

@@ -17,7 +18,7 @@ use uuid::Uuid;
 
 use crate::{AppState, NodeId};
 
-use super::utils::owned_value_to_json_value;
+use super::utils::{data_value_to_json_value, owned_value_to_json_value};
 
 pub type ExtraData = BTreeMap<String, Value>;
 

@@ -376,9 +377,8 @@ impl AppState {
       .get_by_left("panorama/journal/page/content")
       .unwrap()
      .clone();
-    let mut query_parser =
+    let query_parser =
       QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
-    query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
     let query = query_parser.parse_query(query).into_diagnostic()?;
 
     let top_docs = searcher

state/utils.rs (panorama-core)

@@ -1,3 +1,4 @@
+use cozo::{DataValue, Num};
 use itertools::Itertools;
 use serde_json::{Number, Value};
 use tantivy::schema::OwnedValue;

@@ -29,31 +30,28 @@ pub fn owned_value_to_json_value(data_value: &OwnedValue) -> Value {
   }
 }
 
-// pub fn data_value_to_json_value(data_value: &DataValue) -> Value {
-//   match data_value {
-//     DataValue::Null => Value::Null,
-//     DataValue::Bool(b) => Value::Bool(*b),
-//     DataValue::Num(n) => Value::Number(match n {
-//       Num::Int(i) => Number::from(*i),
-//       Num::Float(f) => Number::from_f64(*f).unwrap(),
-//     }),
-//     DataValue::Str(s) => Value::String(s.to_string()),
-//     DataValue::List(v) => {
-//       Value::Array(v.into_iter().map(data_value_to_json_value).collect_vec())
-//     }
-//     DataValue::Json(v) => v.0.clone(),
-//     DataValue::Bytes(s) => {
-//       Value::String(String::from_utf8_lossy(s).to_string())
-//     }
-//     _ => {
-//       println!("Converting unknown {:?}", data_value);
-//       serde_json::to_value(data_value).unwrap()
-//     } // DataValue::Bytes(s) => todo!(),
-//     // DataValue::Uuid(_) => todo!(),
-//     // DataValue::Regex(_) => todo!(),
-//     // DataValue::Set(_) => todo!(),
-//     // DataValue::Vec(_) => todo!(),
-//     // DataValue::Validity(_) => todo!(),
-//     // DataValue::Bot => todo!(),
-//   }
-// }
+pub fn data_value_to_json_value(data_value: &DataValue) -> Value {
+  match data_value {
+    DataValue::Null => Value::Null,
+    DataValue::Bool(b) => Value::Bool(*b),
+    DataValue::Num(n) => Value::Number(match n {
+      Num::Int(i) => Number::from(*i),
+      Num::Float(f) => Number::from_f64(*f).unwrap(),
+    }),
+    DataValue::Str(s) => Value::String(s.to_string()),
+    DataValue::List(v) => {
+      Value::Array(v.into_iter().map(data_value_to_json_value).collect_vec())
+    }
+    DataValue::Json(v) => v.0.clone(),
+    _ => {
+      println!("Converting unknown {:?}", data_value);
+      serde_json::to_value(data_value).unwrap()
+    } // DataValue::Bytes(s) => todo!(),
+    // DataValue::Uuid(_) => todo!(),
+    // DataValue::Regex(_) => todo!(),
+    // DataValue::Set(_) => todo!(),
+    // DataValue::Vec(_) => todo!(),
+    // DataValue::Validity(_) => todo!(),
+    // DataValue::Bot => todo!(),
+  }
+}

Cargo.toml (panorama-daemon)

@@ -9,7 +9,7 @@ edition = "2021"
 anyhow = "1.0.86"
 axum = "0.7.5"
 chrono = { version = "0.4.38", features = ["serde"] }
-# cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
+cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
 csv = "1.3.0"
 dirs = "5.0.1"
 futures = "0.3.30"

crates/panorama-daemon/src/export.rs (new file, +25)

@@ -0,0 +1,25 @@
+use std::{
+  fs::{self, File},
+  path::PathBuf,
+};
+
+use axum::{extract::State, Json};
+use miette::IntoDiagnostic;
+use serde_json::Value;
+
+use crate::{error::AppResult, AppState};
+
+// This code is really bad but gives me a quick way to look at all of the data
+// in the data at once. Rip this out once there's any Real Security Mechanism.
+pub async fn export(State(state): State<AppState>) -> AppResult<Json<Value>> {
+  let export = state.export().await?;
+
+  let base_dir = PathBuf::from("export");
+  fs::create_dir_all(&base_dir).into_diagnostic()?;
+
+  let file = File::create(base_dir.join("export.json")).into_diagnostic()?;
+
+  serde_json::to_writer_pretty(file, &export).into_diagnostic()?;
+
+  Ok(Json(export))
+}

journal.rs (panorama-daemon)

@@ -1,5 +1,6 @@
 use axum::{extract::State, routing::get, Json, Router};
 use chrono::Local;
+use cozo::ScriptMutability;
 use serde_json::Value;
 use utoipa::OpenApi;
 use uuid::Uuid;

@@ -8,27 +9,26 @@ use crate::{error::AppResult, AppState};
 
 /// Node API
 #[derive(OpenApi)]
-#[openapi(paths(), components(schemas()))]
+#[openapi(paths(get_todays_journal_id), components(schemas()))]
 pub(super) struct JournalApi;
 
 pub(super) fn router() -> Router<AppState> {
-  Router::new()
-    // .route("/get_todays_journal_id", get(get_todays_journal_id))
+  Router::new().route("/get_todays_journal_id", get(get_todays_journal_id))
 }
 
-// #[utoipa::path(
-//   get,
-//   path = "/get_todays_journal_id",
-//   responses(
-//     (status = 200),
-//   ),
-// )]
-// pub async fn get_todays_journal_id(
-//   State(state): State<AppState>,
-// ) -> AppResult<Json<Value>> {
-//   let node_id = state.get_todays_journal_id().await?;
-
-//   Ok(Json(json!({
-//     "node_id": node_id.to_string(),
-//   })))
-// }
+#[utoipa::path(
+  get,
+  path = "/get_todays_journal_id",
+  responses(
+    (status = 200),
+  ),
+)]
+pub async fn get_todays_journal_id(
+  State(state): State<AppState>,
+) -> AppResult<Json<Value>> {
+  let node_id = state.get_todays_journal_id().await?;
+
+  Ok(Json(json!({
+    "node_id": node_id.to_string(),
+  })))
+}

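The re-enabled handler above shows the pairing used throughout this daemon: a #[utoipa::path] annotation on the axum handler plus a matching entry in #[openapi(paths(...))], so a route is both served and included in the generated OpenAPI document. A self-contained sketch of the same pattern (the /ping route is hypothetical, not part of this diff):

  use axum::{routing::get, Json, Router};
  use serde_json::{json, Value};
  use utoipa::OpenApi;

  // The macro records method, path, and responses for the spec.
  #[utoipa::path(get, path = "/ping", responses((status = 200)))]
  async fn ping() -> Json<Value> {
    Json(json!({ "ok": true }))
  }

  // Listing the handler under paths(...) pulls its annotation into ApiDoc.
  #[derive(OpenApi)]
  #[openapi(paths(ping))]
  struct ApiDoc;

  fn router() -> Router {
    // Registration with axum still happens separately, as in router() above.
    Router::new().route("/ping", get(ping))
  }
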
lib.rs (panorama-daemon)

@@ -8,6 +8,7 @@ extern crate serde_json;
 extern crate sugars;
 
 mod error;
+mod export;
 mod journal;
 pub mod mail;
 mod node;

@@ -26,6 +27,12 @@ use tower_http::{
 use utoipa::OpenApi;
 use utoipa_scalar::{Scalar, Servable};
 
+use crate::{
+  export::export,
+  mail::{get_mail, get_mail_config},
+  node::search_nodes,
+};
+
 pub async fn run() -> Result<()> {
   #[derive(OpenApi)]
   #[openapi(

@@ -54,10 +61,11 @@ pub async fn run() -> Result<()> {
   let app = Router::new()
     .merge(Scalar::with_url("/api/docs", ApiDoc::openapi()))
     .route("/", get(|| async { "Hello, World!" }))
+    .route("/export", get(export))
     .nest("/node", node::router().with_state(state.clone()))
     .nest("/journal", journal::router().with_state(state.clone()))
-    // .route("/mail/config", get(get_mail_config))
-    // .route("/mail", get(get_mail))
+    .route("/mail/config", get(get_mail_config))
+    .route("/mail", get(get_mail))
     .layer(ServiceBuilder::new().layer(cors_layer))
     .layer(ServiceBuilder::new().layer(trace_layer))
     .with_state(state.clone());

mail.rs (panorama-daemon)

@@ -1,53 +1,54 @@
 use axum::{extract::State, Json};
+use cozo::ScriptMutability;
 use panorama_core::AppState;
 use serde_json::Value;
 
 use crate::error::AppResult;
 
-// pub async fn get_mail_config(
-//   State(state): State<AppState>,
-// ) -> AppResult<Json<Value>> {
-//   let configs = state.fetch_mail_configs()?;
-//   Ok(Json(json!({ "configs": configs })))
-// }
+pub async fn get_mail_config(
+  State(state): State<AppState>,
+) -> AppResult<Json<Value>> {
+  let configs = state.fetch_mail_configs()?;
+  Ok(Json(json!({ "configs": configs })))
+}
 
-// pub async fn get_mail(State(state): State<AppState>) -> AppResult<Json<Value>> {
-//   let mailboxes = state.db.run_script("
-//     ?[node_id, account_node_id, mailbox_name] := *mailbox {node_id, account_node_id, mailbox_name}
-//   ", Default::default(), ScriptMutability::Immutable)?;
-
-//   let mailboxes = mailboxes
-//     .rows
-//     .iter()
-//     .map(|mb| {
-//       json!({
-//         "node_id": mb[0].get_str().unwrap(),
-//         "account_node_id": mb[1].get_str().unwrap(),
-//         "mailbox_name": mb[2].get_str().unwrap(),
-//       })
-//     })
-//     .collect::<Vec<_>>();
-
-//   let messages = state.db.run_script("
-//     ?[node_id, subject, body, internal_date] := *message {node_id, subject, body, internal_date}
-//     :limit 10
-//   ", Default::default(), ScriptMutability::Immutable)?;
-
-//   let messages = messages
-//     .rows
-//     .iter()
-//     .map(|m| {
-//       json!({
-//         "node_id": m[0].get_str().unwrap(),
-//         "subject": m[1].get_str().unwrap(),
-//         "body": m[2].get_str(),
-//         "internal_date": m[3].get_str().unwrap(),
-//       })
-//     })
-//     .collect::<Vec<_>>();
-
-//   Ok(Json(json!({
-//     "mailboxes": mailboxes,
-//     "messages": messages,
-//   })))
-// }
+pub async fn get_mail(State(state): State<AppState>) -> AppResult<Json<Value>> {
+  let mailboxes = state.db.run_script("
+    ?[node_id, account_node_id, mailbox_name] := *mailbox {node_id, account_node_id, mailbox_name}
+  ", Default::default(), ScriptMutability::Immutable)?;
+
+  let mailboxes = mailboxes
+    .rows
+    .iter()
+    .map(|mb| {
+      json!({
+        "node_id": mb[0].get_str().unwrap(),
+        "account_node_id": mb[1].get_str().unwrap(),
+        "mailbox_name": mb[2].get_str().unwrap(),
+      })
+    })
+    .collect::<Vec<_>>();
+
+  let messages = state.db.run_script("
+    ?[node_id, subject, body, internal_date] := *message {node_id, subject, body, internal_date}
+    :limit 10
+  ", Default::default(), ScriptMutability::Immutable)?;
+
+  let messages = messages
+    .rows
+    .iter()
+    .map(|m| {
+      json!({
+        "node_id": m[0].get_str().unwrap(),
+        "subject": m[1].get_str().unwrap(),
+        "body": m[2].get_str(),
+        "internal_date": m[3].get_str().unwrap(),
+      })
+    })
+    .collect::<Vec<_>>();
+
+  Ok(Json(json!({
+    "mailboxes": mailboxes,
+    "messages": messages,
+  })))
+}

node.rs (panorama-daemon)

@@ -10,10 +10,11 @@ use axum::{
   Json, Router,
 };
 use chrono::{DateTime, Utc};
+use cozo::{DataValue, MultiTransaction};
 use itertools::Itertools;
 use miette::IntoDiagnostic;
 use panorama_core::{
-  // state::node::{CreateOrUpdate, ExtraData},
+  state::node::{CreateOrUpdate, ExtraData},
   NodeId,
 };
 use serde_json::Value;

@@ -24,169 +25,172 @@ use crate::{error::AppResult, AppState};
 
 /// Node API
 #[derive(OpenApi)]
-#[openapi(paths(), components(schemas()))]
+#[openapi(
+  paths(get_node, update_node, create_node),
+  components(schemas(GetNodeResult))
+)]
 pub(super) struct NodeApi;
 
 pub(super) fn router() -> Router<AppState> {
   Router::new()
-    // .route("/", put(create_node))
-    // .route("/:id", get(get_node))
-    // .route("/:id", post(update_node))
-    // .route("/search", get(search_nodes))
+    .route("/", put(create_node))
+    .route("/:id", get(get_node))
+    .route("/:id", post(update_node))
+    .route("/search", get(search_nodes))
 }
 
-// #[derive(Serialize, Deserialize, ToSchema, Clone)]
-// struct GetNodeResult {
-//   node_id: String,
-//   fields: HashMap<String, Value>,
-//   created_at: DateTime<Utc>,
-//   updated_at: DateTime<Utc>,
-// }
+#[derive(Serialize, Deserialize, ToSchema, Clone)]
+struct GetNodeResult {
+  node_id: String,
+  fields: HashMap<String, Value>,
+  created_at: DateTime<Utc>,
+  updated_at: DateTime<Utc>,
+}
 
-// /// Get node info
-// ///
-// /// This endpoint retrieves all the fields for a particular node
-// #[utoipa::path(
-//   get,
-//   path = "/{id}",
-//   responses(
-//     (status = 200, body = [GetNodeResult]),
-//     (status = 404, description = "the node ID provided was not found")
-//   ),
-//   params(
-//     ("id" = String, Path, description = "Node ID"),
-//   ),
-// )]
-// pub async fn get_node(
-//   State(state): State<AppState>,
-//   Path(node_id): Path<String>,
-// ) -> AppResult<(StatusCode, Json<Value>)> {
-//   let node_info = state.get_node(&node_id).await?;
-
-//   Ok((
-//     StatusCode::OK,
-//     Json(json!({
-//       "node_id": node_id,
-//       "fields": node_info.fields,
-//       "created_at": node_info.created_at,
-//       "updated_at": node_info.updated_at,
-//     })),
-//   ))
-// }
+/// Get node info
+///
+/// This endpoint retrieves all the fields for a particular node
+#[utoipa::path(
+  get,
+  path = "/{id}",
+  responses(
+    (status = 200, body = [GetNodeResult]),
+    (status = 404, description = "the node ID provided was not found")
+  ),
+  params(
+    ("id" = String, Path, description = "Node ID"),
+  ),
+)]
+pub async fn get_node(
+  State(state): State<AppState>,
+  Path(node_id): Path<String>,
+) -> AppResult<(StatusCode, Json<Value>)> {
+  let node_info = state.get_node(&node_id).await?;
+
+  Ok((
+    StatusCode::OK,
+    Json(json!({
+      "node_id": node_id,
+      "fields": node_info.fields,
+      "created_at": node_info.created_at,
+      "updated_at": node_info.updated_at,
+    })),
+  ))
+}
 
-// #[derive(Deserialize, Debug)]
-// pub struct UpdateData {
-//   extra_data: Option<ExtraData>,
-// }
+#[derive(Deserialize, Debug)]
+pub struct UpdateData {
+  extra_data: Option<ExtraData>,
+}
 
-// /// Update node info
-// #[utoipa::path(
-//   post,
-//   path = "/{id}",
-//   responses(
-//     (status = 200)
-//   ),
-//   params(
-//     ("id" = String, Path, description = "Node ID"),
-//   )
-// )]
-// pub async fn update_node(
-//   State(state): State<AppState>,
-//   Path(node_id): Path<String>,
-//   Json(opts): Json<UpdateData>,
-// ) -> AppResult<Json<Value>> {
-//   let node_id = NodeId(Uuid::from_str(&node_id).into_diagnostic()?);
-//   let node_info = state
-//     .create_or_update_node(CreateOrUpdate::Update { node_id }, opts.extra_data)
-//     .await?;
-
-//   Ok(Json(json!({
-//     "node_id": node_info.node_id.to_string(),
-//   })))
-// }
+/// Update node info
+#[utoipa::path(
+  post,
+  path = "/{id}",
+  responses(
+    (status = 200)
+  ),
+  params(
+    ("id" = String, Path, description = "Node ID"),
+  )
+)]
+pub async fn update_node(
+  State(state): State<AppState>,
+  Path(node_id): Path<String>,
+  Json(opts): Json<UpdateData>,
+) -> AppResult<Json<Value>> {
+  let node_id = NodeId(Uuid::from_str(&node_id).into_diagnostic()?);
+  let node_info = state
+    .create_or_update_node(CreateOrUpdate::Update { node_id }, opts.extra_data)
+    .await?;
+
+  Ok(Json(json!({
+    "node_id": node_info.node_id.to_string(),
+  })))
+}
 
-// #[derive(Debug, Deserialize)]
-// pub struct CreateNodeOpts {
-//   // TODO: Allow submitting a string
-//   // id: Option<String>,
-//   #[serde(rename = "type")]
-//   ty: String,
-//   extra_data: Option<ExtraData>,
-// }
+#[derive(Debug, Deserialize)]
+pub struct CreateNodeOpts {
+  // TODO: Allow submitting a string
+  // id: Option<String>,
+  #[serde(rename = "type")]
+  ty: String,
+  extra_data: Option<ExtraData>,
+}
 
-// #[utoipa::path(
-//   put,
-//   path = "/",
-//   responses((status = 200)),
-// )]
-// pub async fn create_node(
-//   State(state): State<AppState>,
-//   Json(opts): Json<CreateNodeOpts>,
-// ) -> AppResult<Json<Value>> {
-//   let node_info = state
-//     .create_or_update_node(
-//       CreateOrUpdate::Create { r#type: opts.ty },
-//       opts.extra_data,
-//     )
-//     .await?;
-
-//   Ok(Json(json!({
-//     "node_id": node_info.node_id.to_string(),
-//   })))
-// }
+#[utoipa::path(
+  put,
+  path = "/",
+  responses((status = 200)),
+)]
+pub async fn create_node(
+  State(state): State<AppState>,
+  Json(opts): Json<CreateNodeOpts>,
+) -> AppResult<Json<Value>> {
+  let node_info = state
+    .create_or_update_node(
+      CreateOrUpdate::Create { r#type: opts.ty },
+      opts.extra_data,
+    )
+    .await?;
+
+  Ok(Json(json!({
+    "node_id": node_info.node_id.to_string(),
+  })))
+}
 
-// #[derive(Deserialize)]
-// pub struct SearchQuery {
-//   query: String,
-// }
+#[derive(Deserialize)]
+pub struct SearchQuery {
+  query: String,
+}
 
-// #[utoipa::path(
-//   get,
-//   path = "/search",
-//   responses((status = 200)),
-// )]
-// pub async fn search_nodes(
-//   State(state): State<AppState>,
-//   Query(query): Query<SearchQuery>,
-// ) -> AppResult<Json<Value>> {
-//   let search_result = state.search_nodes(query.query).await?;
-//   let search_result = search_result
-//     .into_iter()
-//     .map(|(id, value)| value["fields"].clone())
-//     .collect_vec();
-
-//   Ok(Json(json!({
-//     "results": search_result,
-//   })))
-// }
+#[utoipa::path(
+  get,
+  path = "/search",
+  responses((status = 200)),
+)]
+pub async fn search_nodes(
+  State(state): State<AppState>,
+  Query(query): Query<SearchQuery>,
+) -> AppResult<Json<Value>> {
+  let search_result = state.search_nodes(query.query).await?;
+  let search_result = search_result
+    .into_iter()
+    .map(|(id, value)| value["fields"].clone())
+    .collect_vec();
+
+  Ok(Json(json!({
+    "results": search_result,
+  })))
+}
 
-// fn get_rows_for_extra_keys(
-//   tx: &MultiTransaction,
-//   extra_data: &ExtraData,
-// ) -> AppResult<HashMap<String, (String, String, String)>> {
-//   let result = tx.run_script(
-//     "
-//     ?[key, relation, field_name, type] :=
-//       *fqkey_to_dbkey{key, relation, field_name, type},
-//       is_in(key, $keys)
-//     ",
-//     btmap! {
-//       "keys".to_owned() => DataValue::List(
-//         extra_data
-//           .keys()
-//           .map(|s| DataValue::from(s.as_str()))
-//           .collect::<Vec<_>>()
-//       ),
-//     },
-//   )?;
-
-//   let s = |s: &DataValue| s.get_str().unwrap().to_owned();
-
-//   Ok(
-//     result
-//       .rows
-//       .into_iter()
-//       .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
-//       .collect::<HashMap<_, _>>(),
-//   )
-// }
+fn get_rows_for_extra_keys(
+  tx: &MultiTransaction,
+  extra_data: &ExtraData,
+) -> AppResult<HashMap<String, (String, String, String)>> {
+  let result = tx.run_script(
+    "
+    ?[key, relation, field_name, type] :=
+      *fqkey_to_dbkey{key, relation, field_name, type},
+      is_in(key, $keys)
+    ",
+    btmap! {
+      "keys".to_owned() => DataValue::List(
+        extra_data
+          .keys()
+          .map(|s| DataValue::from(s.as_str()))
+          .collect::<Vec<_>>()
+      ),
+    },
+  )?;
+
+  let s = |s: &DataValue| s.get_str().unwrap().to_owned();
+
+  Ok(
+    result
+      .rows
+      .into_iter()
+      .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
+      .collect::<HashMap<_, _>>(),
+  )
+}

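get_rows_for_extra_keys above illustrates the parameter-binding style these handlers use with cozo: bind a DataValue::List under a name, then filter inside the script with is_in(). A condensed sketch of the same call shape against a DbInstance (relation and column names follow migration_01 earlier in this diff):

  use std::collections::BTreeMap;

  use cozo::{DataValue, DbInstance, ScriptMutability};
  use miette::Result;

  fn keys_to_relations(db: &DbInstance, keys: &[&str]) -> Result<Vec<(String, String)>> {
    // Bind $keys to a list value; is_in() filters rows against it.
    let mut params = BTreeMap::new();
    params.insert(
      "keys".to_owned(),
      DataValue::List(keys.iter().map(|s| DataValue::from(*s)).collect()),
    );
    let result = db.run_script(
      "?[key, relation] := *fqkey_to_dbkey{key, relation}, is_in(key, $keys)",
      params,
      ScriptMutability::Immutable,
    )?;
    Ok(
      result
        .rows
        .iter()
        .map(|row| {
          (
            row[0].get_str().unwrap().to_owned(),
            row[1].get_str().unwrap().to_owned(),
          )
        })
        .collect(),
    )
  }
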
ui/src/lib/data.ts (deleted)

@@ -1,11 +0,0 @@
-export interface NodeInfo {
-  fields: Partial<NodeFields>;
-}
-
-export const JOURNAL_PAGE_CONTENT_FIELD_NAME = "panorama/journal/page/content";
-export const JOURNAL_PAGE_TITLE_FIELD_NAME = "panorama/journal/page/title";
-
-export interface NodeFields {
-  [JOURNAL_PAGE_CONTENT_FIELD_NAME]: string;
-  [JOURNAL_PAGE_TITLE_FIELD_NAME]: string;
-}