save docs

parent bcd2e9086b
commit ccebb53879

12 changed files with 2062 additions and 2414 deletions

Cargo.lock (generated, 1 line changed)

@@ -2778,6 +2778,7 @@ dependencies = [
 "axum",
 "chrono",
 "cozo",
+"csv",
 "dirs",
 "futures",
 "miette",

@@ -17,6 +17,7 @@
     "@mui/material": "^5.15.18",
     "@tanstack/react-query": "^5.37.1",
     "@tauri-apps/api": "^1",
+    "@uidotdev/usehooks": "^2.4.1",
     "@uiw/react-md-editor": "^4.0.4",
     "classnames": "^2.5.1",
     "hast-util-to-jsx-runtime": "^2.3.0",
@@ -31,6 +32,7 @@
     "react-time-ago": "^7.3.3",
     "remark": "^15.0.1",
     "remark-rehype": "^11.1.0",
+    "use-debounce": "^10.0.1",
     "vfile": "^6.0.1"
   },
   "devDependencies": {

@@ -1,3 +1,10 @@
+.container {
+  flex-grow: 1;
+  display: flex;
+  flex-direction: column;
+  gap: 12px;
+}
+
 .mdContent {
   flex-grow: 1;
   display: flex;

@@ -1,139 +1,59 @@
-import {
-  ReactNode,
-  createContext,
-  useCallback,
-  useContext,
-  useEffect,
-  useRef,
-  useState,
-} from "react";
-import { Fragment, jsx, jsxs } from "react/jsx-runtime";
-import styles from "./JournalPage.module.scss";
+import { useEffect, useState } from "react";
 import MDEditor from "@uiw/react-md-editor";
-import Markdown from "react-markdown";
-import { toMdast } from "hast-util-to-mdast";
-import { Node as MdastNode } from "mdast";
-import { fromMarkdown } from "mdast-util-from-markdown";
-import { toMarkdown } from "mdast-util-to-markdown";
-import { toJsxRuntime } from "hast-util-to-jsx-runtime";
-import remarkRehype from "remark-rehype";
-import { VFile } from "vfile";
-import { common } from "@mui/material/colors";
-import classNames from "classnames";
+import { usePrevious, useDebounce } from "@uidotdev/usehooks";
+import { useQueryClient } from "@tanstack/react-query";
+import styles from "./JournalPage.module.scss";
 
-interface MDContextValue {
-  isEditing: boolean;
+export interface JournalPageProps {
+  id: string;
+  data: {
+    content: string;
+  };
 }
 
-// biome-ignore lint/style/noNonNullAssertion: <explanation>
-const MDContext = createContext<MDContextValue>(null!);
-const emptyContent = { type: "root", children: [] };
-
-export default function JournalPage({ id, data }) {
-  const [content, setContent] = useState(() => data.content);
-  const [isEditing, setIsEditing] = useState(() => false);
-  const [currentlyFocused, setCurrentlyFocused] = useState<string | undefined>(
-    () => undefined,
-  );
+export default function JournalPage({ id, data }: JournalPageProps) {
+  const queryClient = useQueryClient();
+  const [value, setValue] = useState(() => data.content);
+  const valueToSave = useDebounce(value, 1000);
+  const previous = usePrevious(valueToSave);
+  const changed = valueToSave !== previous;
 
   useEffect(() => {
-    if (content === null) {
-      setContent(() => ({
-        type: "root",
-        children: [
-          { type: "paragraph", children: [{ type: "text", value: "" }] },
-        ],
-      }));
-      setCurrentlyFocused(".children[0]");
-      setIsEditing(true);
+    if (changed) {
+      (async () => {
+        console.log("Saving...");
+        const resp = await fetch(`http://localhost:5195/node/${id}`, {
+          method: "POST",
+          headers: {
+            "Content-Type": "application/json",
+          },
+          body: JSON.stringify({
+            extra_data: {
+              "panorama/journal/page/content": valueToSave,
+            },
+          }),
+        });
+        const data = await resp.text();
+        console.log("result", data);
+
+        queryClient.invalidateQueries({ queryKey: ["fetchNode", id] });
+      })();
     }
-  }, [content]);
-
-  const contextValue = { content, setContent, isEditing, setIsEditing };
-
-  const jsxContent = convertToJsx(content, { currentlyFocused });
+  }, [id, changed, valueToSave, queryClient]);
 
   return (
-    <>
+    <div data-color-mode="light" className={styles.container}>
       <details>
         <summary>JSON</summary>
         <pre>{JSON.stringify(data, null, 2)}</pre>
       </details>
 
-      <div className={styles.mdContent} data-color-mode="light">
-        <MDContext.Provider value={contextValue}>
-          {jsxContent}
-        </MDContext.Provider>
-
-        <pre>{JSON.stringify(content, null, 2)}</pre>
-      </div>
-    </>
-  );
-}
-
-interface ConvertToJsxOpts {
-  currentlyFocused?: string | undefined;
-  parent?: MdastNode | undefined;
-}
-
-function convertToJsx(
-  tree: MdastNode,
-  opts?: ConvertToJsxOpts | undefined,
-): ReactNode {
-  console.log("tree", tree);
-
-  if (tree === null) return;
-
-  const commonProps = {
-    node: tree,
-    parent: opts?.parent,
-  };
-
-  switch (tree.type) {
-    case "root":
-      return tree.children.map((child) =>
-        convertToJsx(child, { parent: tree }),
-      );
-
-    case "paragraph":
-      return <Paragraph {...commonProps} />;
-
-    default:
-      throw new Error(`unhandled ${tree.type}`);
-  }
-}
-
-function Paragraph({ ...args }) {
-  // const { isEditing } = useContext(MDContext);
-  const [isEditing, setIsEditing] = useState(() => false);
-  const [localValue, setLocalValue] = useState(null);
-
-  const onDoubleClick = useCallback(() => {
-    if (!isEditing) {
-      setIsEditing(true);
-    }
-  }, [isEditing]);
-
-  const save = useCallback(() => {
-    console.log("saving!", localValue);
-  });
-
-  const onPaste = useCallback((evt) => {
-    console.log("pasted");
-  }, []);
-
-  return (
-    <div>
-      <div
-        className={classNames(styles.block, isEditing && styles.isEditing)}
-        contentEditable={isEditing}
-        onDoubleClick={onDoubleClick}
-        onPaste={onPaste}
-        onBlur={save}
-      >
-        <br />
-      </div>
+      <MDEditor
+        value={value}
+        className={styles.mdEditor}
+        onChange={(newValue) => newValue && setValue(newValue)}
+        preview="preview"
+      />
     </div>
   );
 }

@@ -1,9 +0,0 @@
-import { ReactNode } from "react";
-import { Nodes as MdastNodes } from "mdast";
-
-export function convertToJsx(tree: MdastNodes): ReactNode {
-  console.log("tree", tree);
-
-  switch (tree.type) {
-  }
-}

@@ -10,6 +10,7 @@ anyhow = "1.0.86"
 axum = "0.7.5"
 chrono = { version = "0.4.38", features = ["serde"] }
 cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
+csv = "1.3.0"
 dirs = "5.0.1"
 futures = "0.3.30"
 miette = "5.5.0"

@@ -7,9 +7,12 @@ use std::{
 
 use axum::extract::State;
 use cozo::ScriptMutability;
+use csv::WriterBuilder;
 
 use crate::{error::AppResult, AppState};
 
+// This code is really bad but gives me a quick way to look at all of the data
+// in the data at once. Rip this out once there's any Real Security Mechanism.
 pub async fn export(State(state): State<AppState>) -> AppResult<()> {
     let result = state.db.run_script(
         "::relations",
@@ -52,8 +55,12 @@ pub async fn export(State(state): State<AppState>) -> AppResult<()> {
     let tx = state.db.multi_transaction(false);
 
     for relation_name in relation_names.iter() {
-        let relation_path = base_dir.join(format!("{relation_name}.ndjson"));
-        let mut file = File::create(&relation_path).unwrap();
+        let relation_path = base_dir.join(format!("{relation_name}.csv"));
+        let mut writer = WriterBuilder::new()
+            .has_headers(true)
+            .from_path(relation_path)
+            .unwrap();
+        // let mut file = File::create(&relation_path).unwrap();
 
         let columns = relation_columns
             .get(relation_name.as_str())
@@ -64,18 +71,19 @@ pub async fn export(State(state): State<AppState>) -> AppResult<()> {
         println!("Query: {query}");
         let result = tx.run_script(&query, Default::default())?;
 
+        writer.write_record(result.headers).unwrap();
 
         for row in result.rows.into_iter() {
-            let mut object = HashMap::new();
-            for (idx, col) in row.into_iter().enumerate() {
-                let row_name = result.headers[idx].clone();
-                object.insert(row_name, col);
+            // let serialized = serde_json::to_string(&object).unwrap();
+            writer
+                .write_record(
+                    row.iter().map(|col| serde_json::to_string(&col).unwrap()),
+                )
+                .unwrap();
+            // file.write(b"\n");
         }
 
-            let serialized = serde_json::to_string(&object).unwrap();
-            file.write(serialized.as_bytes());
-            file.write(b"\n");
-        }
+        writer.flush().unwrap();
     }
 
     Ok(())

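The hunks above swap the hand-rolled NDJSON writer for the `csv` crate: one header record first, then every cell of every row serialized to JSON text. A minimal standalone sketch of that pattern, with made-up headers and rows standing in for the Cozo query result:

use csv::WriterBuilder;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Stand-ins for `result.headers` / `result.rows` from the query above.
    let headers = vec!["id", "title"];
    let rows = vec![
        vec![json!("node-1"), json!("first node")],
        vec![json!("node-2"), json!(null)],
    ];

    let mut writer = WriterBuilder::new()
        .has_headers(true)
        .from_path("export.csv")?;

    // Header record, then one record per row; each cell is JSON-encoded so
    // nulls, numbers, and nested values survive as plain text in the CSV.
    writer.write_record(&headers)?;
    for row in rows {
        writer.write_record(row.iter().map(|col| serde_json::to_string(col).unwrap()))?;
    }
    writer.flush()?;
    Ok(())
}
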
@@ -24,6 +24,7 @@ pub async fn get_todays_journal_id(
 
     println!("Result: {:?}", result);
 
+    // TODO: Do this check on the server side
     if result.rows.len() == 0 {
         // Insert a new one
         let uuid = Uuid::now_v7();
@@ -36,7 +37,7 @@ pub async fn get_todays_journal_id(
             :put node { id, title, type }
         }
         {
-            ?[node_id, content] <- [[$node_id, {}]]
+            ?[node_id, content] <- [[$node_id, '']]
             :put journal { node_id => content }
         }
         {

@@ -59,9 +59,8 @@ async fn main() -> Result<()> {
     let state = AppState { db };
 
     let cors = CorsLayer::new()
-        // allow `GET` and `POST` when accessing the resource
         .allow_methods([Method::GET, Method::POST])
-        // allow requests from any origin
+        .allow_headers(cors::Any)
         .allow_origin(cors::Any);
 
     // build our application with a single route

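For reference, a self-contained sketch of how a permissive CorsLayer like the one above is typically attached to an axum Router, assuming axum 0.7 and tower-http with the cors feature; the /health route is illustrative, not from this commit:

use axum::{http::Method, routing::get, Router};
use tower_http::cors::{Any, CorsLayer};

fn app() -> Router {
    // Mirrors the layer built in main(): GET/POST, any headers, any origin.
    let cors = CorsLayer::new()
        .allow_methods([Method::GET, Method::POST])
        .allow_headers(Any)
        .allow_origin(Any);

    Router::new()
        .route("/health", get(|| async { "ok" }))
        .layer(cors)
}
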
@@ -126,13 +126,19 @@ fn migration_01(db: &DbInstance) -> Result<()> {
                 type: String,
             }
         }
+        {
+            ?[key, relation, field_name, type] <- [
+                ['panorama/journal/page/content', 'journal', 'content', 'string']
+            ]
+            :put fqkey_to_dbkey { key, relation, field_name, type }
+        }
 
         # Create journal type
-        { :create journal { node_id: String => content: Json } }
+        { :create journal { node_id: String => content: String } }
         { :create journal_day { day: String => node_id: String } }
         {
             ::fts create journal:text_index {
-                extractor: dump_json(content),
+                extractor: content,
                 extract_filter: !is_null(content),
                 tokenizer: Simple,
                 filters: [Lowercase, Stemmer('english'), Stopwords('en')],

@@ -5,7 +5,7 @@ use axum::{
     http::StatusCode,
     Json,
 };
-use cozo::{DataValue, ScriptMutability};
+use cozo::{DataValue, ScriptMutability, Vector};
 use serde_json::Value;
 
 use crate::{error::AppResult, AppState};
@@ -69,18 +69,21 @@ pub async fn update_node(
     Json(update_data): Json<UpdateData>,
 ) -> AppResult<Json<Value>> {
     println!("Update data: {:?}", update_data);
+    let node_id_data = DataValue::from(node_id.clone());
+
+    // TODO: Combine these into the same script
 
     let tx = state.db.multi_transaction(true);
 
     if let Some(extra_data) = update_data.extra_data {
         let result = tx.run_script(
             "
-            ?[relation, field_name, type] :=
+            ?[key, relation, field_name, type] :=
                 *fqkey_to_dbkey{key, relation, field_name, type},
-                key = $key
+                is_in(key, $keys)
             ",
             btmap! {
-                "key".to_owned() => DataValue::List(
+                "keys".to_owned() => DataValue::List(
                     extra_data
                         .keys()
                         .map(|s| DataValue::from(s.as_str()))
@@ -89,8 +92,48 @@ pub async fn update_node(
             },
         )?;
 
-        println!("Result: {result:?}");
+        let s = |s: &DataValue| s.get_str().unwrap().to_owned();
+        let result = result
+            .rows
+            .into_iter()
+            .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
+            .collect::<HashMap<_, _>>();
+
+        for (key, (relation, field_name, ty)) in result.iter() {
+            let new_value = extra_data.get(key).unwrap();
+
+            // TODO: Make this more generic
+            let new_value = DataValue::from(new_value.as_str().unwrap());
+
+            let query = format!(
+                "
+                ?[ node_id, {field_name} ] <- [[$node_id, $input_data]]
+                :update {relation} {{ node_id, {field_name} }}
+                "
+            );
+            println!("QUERY: {query:?}");
+            let result = tx.run_script(
+                &query,
+                btmap! {
+                    "node_id".to_owned() => node_id_data.clone(),
+                    "input_data".to_owned() => new_value,
+                },
+            )?;
+
+            println!("RESULT: {result:?}");
+        }
     }
+
+    tx.run_script(
+        "
+        # Always update the time
+        ?[ id, updated_at ] <- [[ $node_id, now() ]]
+        :update node { id, updated_at }
+        ",
+        btmap! {
+            "node_id".to_owned() => node_id_data,
+        },
+    );
 
     tx.commit()?;

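To make the row-to-map step above concrete: the handler turns each [key, relation, field_name, type] row from fqkey_to_dbkey into a lookup table, then formats one :update script per matched key. A small sketch of just the mapping and lookup, with plain Strings standing in for Cozo DataValues:

use std::collections::HashMap;

fn main() {
    // One row shaped like the `*fqkey_to_dbkey` query result above.
    let rows = vec![vec![
        "panorama/journal/page/content".to_string(),
        "journal".to_string(),
        "content".to_string(),
        "string".to_string(),
    ]];

    // key -> (relation, field_name, type), same shape as in the diff.
    let mapping: HashMap<String, (String, String, String)> = rows
        .into_iter()
        .map(|row| (row[0].clone(), (row[1].clone(), row[2].clone(), row[3].clone())))
        .collect();

    if let Some((relation, field_name, _ty)) = mapping.get("panorama/journal/page/content") {
        // The real handler formats `:update {relation} {{ node_id, {field_name} }}` here.
        println!("would update relation `{relation}`, column `{field_name}`");
    }
}
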
pnpm-lock.yaml (4191 lines changed): file diff suppressed because it is too large.