node impl

Michael Zhang 2024-07-09 20:06:19 -05:00
parent 9ae70cdd36
commit f42dff2a12
241 changed files with 525 additions and 8994 deletions

@@ -1,10 +0,0 @@
[package]
name = "panorama-codetrack"
version = "0.1.0"
edition = "2021"
[[bin]]
name = "panorama-codetrack"
path = "rust-src/main.rs"
[dependencies]

@@ -1,30 +0,0 @@
name: panorama/codetrack
version: 0.1.0
panorama_version: 0.1.0
description: Code tracking app similar to WakaTime
command: cargo run -p panorama-codetrack
node_types:
- name: heartbeat
keys:
- name: start_time
type: date
- name: end_time
type: date
- name: project
type: text
indexes:
- type: rtree
start: panorama/codetrack/start_time
end: panorama/codetrack/end_time
endpoints:
profiles:
release:
module: ./main.wasm

@@ -1,3 +0,0 @@
fn main() {
println!("Hello, world!");
}

@@ -1,9 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext", "DOM", "DOM.Iterable"],
"allowJs": false,
"skipLibCheck": true,
"target": "ESNext",
"module": "ESNext"
}
}

@@ -1,3 +0,0 @@
export default {
nodeTypes: {},
};

biome.json

@@ -0,0 +1,18 @@
{
"$schema": "https://biomejs.dev/schemas/1.4.1/schema.json",
"organizeImports": {
"enabled": true
},
"formatter": {
"enabled": true,
"indentWidth": 2,
"indentStyle": "space",
"lineWidth": 100
},
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
}
}

@@ -1,40 +0,0 @@
[package]
name = "panorama-core"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = { version = "1.0.86", features = ["backtrace"] }
backoff = { version = "0.4.0", features = ["tokio"] }
bimap = "0.6.3"
chrono = { version = "0.4.38", features = ["serde"] }
futures = "0.3.30"
itertools = "0.13.0"
schemars = "0.8.21"
serde = { version = "1.0.203", features = ["derive"] }
serde_json = "1.0.117"
serde_yaml = "0.9.34"
sqlx = { version = "0.7.4", features = [
"runtime-tokio",
"tls-rustls",
"macros",
"sqlite",
"uuid",
"chrono",
"regexp",
] }
sugars = "3.0.1"
tantivy = { version = "0.22.0", features = ["zstd"] }
tokio = { version = "1.38.0", features = ["full"] }
uuid = { version = "1.8.0", features = ["v7"] }
walkdir = "2.5.0"
wasmtime = { version = "22.0.0", default-features = false, features = [
"runtime",
"cranelift",
] }
wasmtime-wasi = "22.0.0"
[dependencies.async-imap]
version = "0.9.7"
default-features = false
features = ["runtime-tokio"]

@@ -1,4 +0,0 @@
fn main() {
println!("cargo:rerun-if-changed=../../apps");
println!("cargo:rerun-if-changed=migrations");
}

@@ -1,40 +0,0 @@
CREATE TABLE node (
node_id TEXT PRIMARY KEY,
node_type TEXT NOT NULL,
updated_at INTEGER NOT NULL DEFAULT CURRENT_TIMESTAMP,
extra_data JSON
);
CREATE TABLE node_has_key (
node_id TEXT NOT NULL,
full_key TEXT NOT NULL,
PRIMARY KEY (node_id, full_key)
);
CREATE INDEX node_has_key_idx_node_id ON node_has_key(node_id);
CREATE INDEX node_has_key_idx_full_key ON node_has_key(full_key);
-- App-related tables
CREATE TABLE app (
app_id INTEGER PRIMARY KEY AUTOINCREMENT,
app_name TEXT NOT NULL,
app_version TEXT NOT NULL,
app_version_hash TEXT,
app_description TEXT,
app_homepage TEXT,
app_repository TEXT,
app_license TEXT
);
CREATE TABLE app_table_mapping (
app_id INTEGER NOT NULL,
app_table_name TEXT NOT NULL,
db_table_name TEXT NOT NULL
);
CREATE TABLE key_mapping (
full_key TEXT NOT NULL,
app_id INTEGER NOT NULL,
app_table_name TEXT NOT NULL,
app_table_field TEXT NOT NULL,
is_fts_enabled BOOLEAN NOT NULL DEFAULT FALSE
);

@@ -1,42 +0,0 @@
#[macro_use]
extern crate serde;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate sugars;
pub mod migrations;
pub mod state;
// pub mod mail;
pub mod messaging;
#[cfg(test)]
mod tests;
use std::fmt;
pub use crate::state::AppState;
use anyhow::{bail, Result};
use serde_json::Value;
use uuid::Uuid;
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub struct NodeId(pub Uuid);
impl fmt::Display for NodeId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.to_string())
}
}
pub fn ensure_ok(s: &str) -> Result<()> {
let status: Value = serde_json::from_str(&s)?;
let status = status.as_object().unwrap();
let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
if !ok {
let display = status.get("display").unwrap().as_str().unwrap();
bail!("shit (error: {display})")
}
Ok(())
}

@@ -1,286 +0,0 @@
use std::{
collections::{HashMap, HashSet},
time::Duration,
};
use anyhow::{Context as _, Result};
use async_imap::Session;
use backoff::{exponential::ExponentialBackoff, SystemClock};
use futures::TryStreamExt;
use itertools::Itertools;
use tokio::{net::TcpStream, time::sleep};
use uuid::Uuid;
use crate::{mail, AppState};
pub struct MailWorker {
state: AppState,
}
impl MailWorker {
pub fn new(state: AppState) -> MailWorker {
MailWorker { state }
}
pub async fn mail_loop(self) -> Result<()> {
loop {
let mut policy = ExponentialBackoff::<SystemClock>::default();
policy.current_interval = Duration::from_secs(5);
policy.initial_interval = Duration::from_secs(5);
backoff::future::retry(policy, || async {
match self.mail_loop_inner().await {
Ok(_) => {}
Err(err) => {
eprintln!("Mail error: {:?}", err);
Err(err)?;
}
}
// For now, just sleep 30 seconds and then fetch again
// TODO: Run a bunch of connections at once and do IDLE over them (if possible)
sleep(Duration::from_secs(30)).await;
Ok(())
})
.await?;
}
Ok(())
}
async fn mail_loop_inner(&self) -> Result<()> {
// Fetch the mail configs
let configs = self.state.fetch_mail_configs()?;
if configs.is_empty() {
return Ok(());
}
// TODO: Do all configs instead of just the first
let config = &configs[0];
let stream =
TcpStream::connect((config.imap_hostname.as_str(), config.imap_port))
.await?;
let client = async_imap::Client::new(stream);
let mut session = client
.login(&config.imap_username, &config.imap_password)
.await
.map_err(|(err, _)| err)?;
let all_mailbox_ids = self
.fetch_and_store_all_mailboxes(config.node_id.to_string(), &mut session)
.await
.context("Could not fetch mailboxes")?;
self
.fetch_all_mail_from_single_mailbox(
&mut session,
&all_mailbox_ids,
config.node_id.to_string(),
"INBOX",
)
.await
.context("Could not fetch mail from INBOX")?;
session.logout().await.into_diagnostic()?;
Ok(())
}
async fn fetch_and_store_all_mailboxes(
&self,
config_node_id: String,
session: &mut Session<TcpStream>,
) -> Result<HashMap<String, String>> {
// println!("Session: {:?}", session);
let mailboxes = session
.list(None, Some("*"))
.await?
.try_collect::<Vec<_>>()
.await?;
let mut all_mailboxes = HashMap::new();
// TODO: Make this more efficient by using bulk in query
for mailbox in mailboxes {
let tx = self.state.db.multi_transaction(true);
let result = tx.run_script(
"
?[node_id] :=
*mailbox{node_id, account_node_id, mailbox_name},
account_node_id = $account_node_id,
mailbox_name = $mailbox_name,
",
btmap! {
"account_node_id".to_owned()=>DataValue::from(config_node_id.clone()),
"mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
},
)?;
let node_id = if result.rows.len() == 0 {
let new_node_id = Uuid::now_v7();
let new_node_id = new_node_id.to_string();
let extra_data = json!({
"name": mailbox.name(),
});
tx.run_script("
?[node_id, account_node_id, mailbox_name, extra_data] <-
[[$new_node_id, $account_node_id, $mailbox_name, $extra_data]]
:put mailbox { node_id, account_node_id, mailbox_name, extra_data }
",
btmap! {
"new_node_id".to_owned() => DataValue::from(new_node_id.clone()),
"account_node_id".to_owned() => DataValue::from(config_node_id.clone()),
"mailbox_name".to_owned()=>DataValue::from(mailbox.name().to_string()),
"extra_data".to_owned()=>DataValue::Json(JsonData(extra_data)),
},
)?;
new_node_id
} else {
result.rows[0][0].get_str().unwrap().to_owned()
};
tx.commit()?;
all_mailboxes.insert(mailbox.name().to_owned(), node_id);
}
// println!("All mailboxes: {:?}", all_mailboxes);
Ok(all_mailboxes)
}
async fn fetch_all_mail_from_single_mailbox(
&self,
session: &mut Session<TcpStream>,
all_mailbox_ids: &HashMap<String, String>,
config_node_id: String,
mailbox_name: impl AsRef<str>,
) -> Result<()> {
let mailbox_name = mailbox_name.as_ref();
let mailbox = session.select(mailbox_name).await.into_diagnostic()?;
let mailbox_node_id = all_mailbox_ids.get(mailbox_name).unwrap();
let extra_data = json!({
"uid_validity": mailbox.uid_validity,
"last_seen": mailbox.unseen,
});
// TODO: Validate uid validity here
let all_uids = session
.uid_search("ALL")
.await
.context("Could not fetch all UIDs")?;
println!("All UIDs ({}): {:?}", all_uids.len(), all_uids);
let messages = session
.uid_fetch(
all_uids.iter().join(","),
"(FLAGS ENVELOPE BODY[HEADER] BODY[TEXT] INTERNALDATE)",
)
.await
.into_diagnostic()?
.try_collect::<Vec<_>>()
.await
.into_diagnostic()
.context("Could not fetch messages")?;
println!(
"messages {:?}",
messages.iter().map(|f| f.body()).collect::<Vec<_>>()
);
let mut unique_message_ids = HashSet::new();
let data: Vec<_> = messages
.iter()
.map(|msg| {
let message_node_id = Uuid::now_v7();
let headers =
String::from_utf8(msg.header().unwrap().to_vec()).unwrap();
let headers = headers
.split("\r\n")
.filter_map(|s| {
// This is really bad lmao
let p = s.split(": ").collect::<Vec<_>>();
if p.len() < 2 {
None
} else {
Some((p[0], p[1..].join(": ")))
}
})
.collect::<HashMap<_, _>>();
let message_id = headers
.get("Message-ID")
.map(|s| (*s).to_owned())
.unwrap_or(message_node_id.to_string());
unique_message_ids.insert(message_id.clone());
DataValue::List(vec![
DataValue::from(message_node_id.to_string()),
DataValue::from(config_node_id.to_string()),
DataValue::from(mailbox_node_id.clone()),
DataValue::from(
headers
.get("Subject")
.map(|s| (*s).to_owned())
.unwrap_or("Subject".to_owned()),
),
DataValue::Json(JsonData(serde_json::to_value(&headers).unwrap())),
DataValue::Bytes(msg.text().unwrap().to_vec()),
DataValue::from(msg.internal_date().unwrap().to_rfc3339()),
DataValue::from(message_id),
])
})
.collect();
println!("Adding {} messages to database...", data.len());
let input_data = DataValue::List(data);
// TODO: Can this be one query?
let tx = self.state.db.multi_transaction(true);
let unique_message_ids_data_value = DataValue::List(
unique_message_ids
.into_iter()
.map(|s| DataValue::from(s))
.collect_vec(),
);
let existing_ids = tx.run_script(
"
?[node_id] := *message { node_id, message_id },
is_in(message_id, $message_ids)
",
btmap! { "message_ids".to_owned() => unique_message_ids_data_value },
)?;
println!("Existing ids: {:?}", existing_ids);
self
.state
.db
.run_script(
"
?[
node_id, account_node_id, mailbox_node_id, subject, headers, body,
internal_date, message_id
] <- $input_data
:put message {
node_id, account_node_id, mailbox_node_id, subject, headers, body,
internal_date, message_id
}
",
btmap! {
"input_data".to_owned() => input_data,
},
ScriptMutability::Mutable,
)
.context("Could not add message to database")?;
Ok(())
}
}

@@ -1,4 +0,0 @@
//! Panorama uses an internal messaging system to pass content around
//!
//! This implementation is dead simple, just passes all messages and filters on the other end
pub struct Messaging {}

@@ -1,197 +0,0 @@
use sqlx::migrate::Migrator;
pub static MIGRATOR: Migrator = sqlx::migrate!();
// pub async fn run_migrations(db: &DbInstance) -> Result<()> {
// let migration_status = check_migration_status(db).await?;
// println!("migration status: {:?}", migration_status);
// let migrations: Vec<Box<dyn for<'a> Fn(&'a DbInstance) -> Result<()>>> =
// vec![Box::new(no_op), Box::new(migration_01)];
// if let MigrationStatus::NoMigrations = migration_status {
// let result = db.run_script_str(
// "
// { :create migrations { yeah: Int default 0 => version: Int default 0 } }
// {
// ?[yeah, version] <- [[0, 0]]
// :put migrations { yeah, version }
// }
// ",
// "",
// false,
// );
// ensure_ok(&result)?;
// }
// let start_at_migration = match migration_status {
// MigrationStatus::NoMigrations => 0,
// MigrationStatus::HasVersion(n) => n,
// };
// let migrations_to_run = migrations
// .iter()
// .enumerate()
// .skip(start_at_migration as usize + 1);
// // println!("running {} migrations...", migrations_to_run.len());
// //TODO: This should all be done in a transaction
// for (idx, migration) in migrations_to_run {
// println!("running migration {idx}...");
// migration(db)?;
// let result = db.run_script_str(
// "
// ?[yeah, version] <- [[0, $version]]
// :put migrations { yeah => version }
// ",
// &format!("{{\"version\":{}}}", idx),
// false,
// );
// ensure_ok(&result)?;
// println!("succeeded migration {idx}!");
// }
// Ok(())
// }
// #[derive(Debug)]
// enum MigrationStatus {
// NoMigrations,
// HasVersion(u64),
// }
// async fn check_migration_status(db: &DbInstance) -> Result<MigrationStatus> {
// let status = db.run_script_str(
// "
// ?[yeah, version] := *migrations[yeah, version]
// ",
// "",
// true,
// );
// println!("Status: {}", status);
// let status: Value = serde_json::from_str(&status).into_diagnostic()?;
// let status = status.as_object().unwrap();
// let ok = status.get("ok").unwrap().as_bool().unwrap_or(false);
// if !ok {
// let status_code = status.get("code").unwrap().as_str().unwrap();
// if status_code == "query::relation_not_found" {
// return Ok(MigrationStatus::NoMigrations);
// }
// }
// let rows = status.get("rows").unwrap().as_array().unwrap();
// let row = rows[0].as_array().unwrap();
// let version = row[1].as_number().unwrap().as_u64().unwrap();
// println!("row: {row:?}");
// Ok(MigrationStatus::HasVersion(version))
// }
// fn no_op(_: &DbInstance) -> Result<()> {
// Ok(())
// }
// fn migration_01(db: &DbInstance) -> Result<()> {
// let result = db.run_script_str(
// "
// # Primary node type
// {
// :create node {
// id: String
// =>
// type: String,
// created_at: Float default now(),
// updated_at: Float default now(),
// extra_data: Json default {},
// }
// }
// # Inverse mappings for easy querying
// { :create node_has_key { key: String => id: String } }
// { ::index create node_has_key:inverse { id } }
// { :create node_managed_by_app { node_id: String => app: String } }
// { :create node_refers_to { node_id: String => other_node_id: String } }
// {
// :create fqkey_to_dbkey {
// key: String
// =>
// relation: String,
// field_name: String,
// type: String,
// is_fts_enabled: Bool,
// }
// }
// {
// ?[key, relation, field_name, type, is_fts_enabled] <- [
// ['panorama/journal/page/day', 'journal_day', 'day', 'string', false],
// ['panorama/journal/page/title', 'journal', 'title', 'string', true],
// ['panorama/journal/page/content', 'journal', 'content', 'string', true],
// ['panorama/mail/config/imap_hostname', 'mail_config', 'imap_hostname', 'string', false],
// ['panorama/mail/config/imap_port', 'mail_config', 'imap_port', 'int', false],
// ['panorama/mail/config/imap_username', 'mail_config', 'imap_username', 'string', false],
// ['panorama/mail/config/imap_password', 'mail_config', 'imap_password', 'string', false],
// ['panorama/mail/message/body', 'message', 'body', 'string', true],
// ['panorama/mail/message/subject', 'message', 'subject', 'string', true],
// ['panorama/mail/message/message_id', 'message', 'message_id', 'string', false],
// ]
// :put fqkey_to_dbkey { key, relation, field_name, type, is_fts_enabled }
// }
// # Create journal type
// { :create journal { node_id: String => title: String default '', content: String } }
// { :create journal_day { day: String => node_id: String } }
// # Mail
// {
// :create mail_config {
// node_id: String
// =>
// imap_hostname: String,
// imap_port: Int,
// imap_username: String,
// imap_password: String,
// }
// }
// {
// :create mailbox {
// node_id: String
// =>
// account_node_id: String,
// mailbox_name: String,
// uid_validity: Int? default null,
// extra_data: Json default {},
// }
// }
// { ::index create mailbox:by_account_id_and_name { account_node_id, mailbox_name } }
// {
// :create message {
// node_id: String
// =>
// message_id: String,
// account_node_id: String,
// mailbox_node_id: String,
// subject: String,
// headers: Json?,
// body: Bytes,
// internal_date: String,
// }
// }
// { ::index create message:message_id { message_id } }
// { ::index create message:date { internal_date } }
// { ::index create message:by_mailbox_id { mailbox_node_id } }
// # Calendar
// ",
// "",
// false,
// );
// ensure_ok(&result)?;
// Ok(())
// }

@@ -1,51 +0,0 @@
use std::io::{stdout, Write};
use anyhow::Result;
use chrono::{DateTime, Utc};
use wasmtime::{Caller, InstancePre, Linker, Memory};
pub struct WasmtimeModule {
pub(crate) module: InstancePre<WasmtimeInstanceEnv>,
}
impl WasmtimeModule {
pub fn link_imports(linker: &mut Linker<WasmtimeInstanceEnv>) -> Result<()> {
macro_rules! link_function {
($($module:literal :: $func:ident),* $(,)?) => {
linker $(
.func_wrap(
$module,
concat!("_", stringify!($func)),
WasmtimeInstanceEnv::$func,
)?
)*;
};
}
abi_funcs!(link_function);
Ok(())
}
}
/// This is loosely based on SpacetimeDB's implementation of their host.
/// See: https://github.com/clockworklabs/SpacetimeDB/blob/c19c0d45c454db2a4215deb23c7f9f82cb5d7561/crates/core/src/host/wasmtime/wasm_instance_env.rs
pub struct WasmtimeInstanceEnv {
/// This is only an Option because memory is initialized after this is created so we need to come back and put it in later
pub(crate) mem: Option<Memory>,
}
impl WasmtimeInstanceEnv {
pub fn print(mut caller: Caller<'_, Self>, len: u64, ptr: u32) {
let mem = caller.data().mem.unwrap();
let mut buffer = vec![0; len as usize];
mem.read(caller, ptr as usize, &mut buffer);
let s = String::from_utf8(buffer).unwrap();
println!("Called print: {}", s);
}
pub fn get_current_time(_: Caller<'_, Self>) -> i64 {
let now = Utc::now();
now.timestamp_nanos_opt().unwrap()
}
pub fn register_endpoint(mut caller: Caller<'_, Self>) {}
}

@@ -1,10 +0,0 @@
macro_rules! abi_funcs {
($macro_name:ident) => {
// TODO: Why is this "env"? How do I use another name?
$macro_name! {
"env"::get_current_time,
"env"::print,
"env"::register_endpoint,
}
};
}

@@ -1,7 +0,0 @@
use anyhow::Result;
pub struct Memory {
pub memory: wasmtime::Memory,
}
impl Memory {}

@@ -1,160 +0,0 @@
#[macro_use]
pub mod macros;
pub mod internal;
pub mod manifest;
pub mod memory;
use std::{
collections::HashMap,
fs::{self, File},
io::Read,
path::{Path, PathBuf},
};
use anyhow::{anyhow, Context as _, Result};
use internal::{WasmtimeInstanceEnv, WasmtimeModule};
use itertools::Itertools;
use wasmtime::{AsContext, Config, Engine, Linker, Memory, Module, Store};
use crate::AppState;
use self::manifest::AppManifest;
pub type AllAppData = HashMap<String, AppData>;
impl AppState {
pub async fn install_apps_from_search_paths(&self) -> Result<AllAppData> {
let search_paths = vec![
PathBuf::from("/Users/michael/Projects/panorama/apps"),
PathBuf::from("/home/michael/Projects/panorama/apps"),
];
let mut found = Vec::new();
for path in search_paths {
if !path.exists() {
continue;
}
let read_dir = fs::read_dir(&path)
.with_context(|| format!("could not read {}", path.display()))?;
for dir_entry in read_dir {
let dir_entry = dir_entry?;
let path = dir_entry.path();
let manifest_path = path.join("manifest.yml");
if manifest_path.exists() {
found.push(path);
}
}
}
let mut all_app_data = HashMap::new();
for path in found {
let app_data = self.install_app_from_path(&path).await?;
println!("App data: {:?}", app_data);
all_app_data.insert(
path.display().to_string(),
AppData {
name: "hello".to_string(),
},
);
}
Ok(all_app_data)
}
}
#[derive(Debug)]
pub struct AppData {
name: String,
}
impl AppState {
async fn install_app_from_path(&self, path: impl AsRef<Path>) -> Result<()> {
let app_path = path.as_ref();
let manifest_path = app_path.join("manifest.yml");
let manifest: AppManifest = {
let file = File::open(&manifest_path)?;
serde_yaml::from_reader(file).with_context(|| {
format!(
"Could not parse config file from {}",
manifest_path.display()
)
})?
};
println!("Manifest: {:?}", manifest);
let module_path = app_path.join(manifest.module);
let installer_program = {
let mut file = File::open(&module_path).with_context(|| {
format!(
"Could not open installer from path: {}",
module_path.display()
)
})?;
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
buf
};
println!("Installer program: {} bytes", installer_program.len());
let config = Config::new();
let engine = Engine::new(&config)?;
let module = Module::new(&engine, &installer_program)?;
let mut linker = Linker::new(&engine);
WasmtimeModule::link_imports(&mut linker)?;
let module = linker.instantiate_pre(&module)?;
let module = WasmtimeModule { module };
let mut state = WasmtimeInstanceEnv { mem: None };
let mut store = Store::new(&engine, state);
println!(
"Required imports: {:?}",
module
.module
.module()
.imports()
.map(|s| s.name())
.collect_vec()
);
let instance = module
.module
.instantiate(&mut store)
.context("Could not instantiate")?;
let mem = instance
.get_memory(&mut store, "memory")
.ok_or_else(|| anyhow!("Fuck!"))?;
store.data_mut().mem = Some(mem);
instance.exports(&mut store).for_each(|export| {
println!("Export: {}", export.name());
});
let hello = instance
.get_typed_func::<(), i32>(&mut store, "install")
.context("Could not get typed function")?;
hello.call(&mut store, ()).context("Could not call")?;
Ok(())
}
}
fn read_utf_8string<C>(
c: C,
mem: &Memory,
len: usize,
offset: usize,
) -> Result<String>
where
C: AsContext,
{
let mut buffer = vec![0; len];
mem.read(c, offset, &mut buffer)?;
let string = String::from_utf8(buffer)?;
Ok(string)
}

@@ -1,27 +0,0 @@
use std::path::PathBuf;
use schemars::JsonSchema;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct AppManifest {
pub name: String,
pub version: Option<String>,
pub panorama_version: Option<String>,
pub description: Option<String>,
pub module: PathBuf,
#[serde(default)]
pub endpoints: Vec<AppManifestEndpoint>,
#[serde(default)]
pub triggers: Vec<AppManifestTriggers>,
}
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct AppManifestEndpoint {
pub url: String,
pub method: String,
pub export_name: String,
}
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct AppManifestTriggers {}

@@ -1,74 +0,0 @@
pub mod manifest;
use std::{
collections::HashMap,
fs::{self, File},
path::{Path, PathBuf},
};
use anyhow::{Context as _, Result};
use crate::AppState;
use self::manifest::AppManifest;
impl AppState {
pub async fn install_apps_from_search_paths(&self) -> Result<()> {
let search_paths = vec![
PathBuf::from("/Users/michael/Projects/panorama/apps"),
PathBuf::from("/home/michael/Projects/panorama/apps"),
];
let mut found = Vec::new();
for path in search_paths {
if !path.exists() {
continue;
}
let read_dir = fs::read_dir(&path)
.with_context(|| format!("could not read {}", path.display()))?;
for dir_entry in read_dir {
let dir_entry = dir_entry?;
let path = dir_entry.path();
let manifest_path = path.join("manifest.yml");
if manifest_path.exists() {
found.push(path);
}
}
}
// let mut all_app_data = HashMap::new();
// for path in found {
// let app_data = self.install_app_from_path(&path).await?;
// println!("App data: {:?}", app_data);
// all_app_data.insert(
// path.display().to_string(),
// AppData {
// name: "hello".to_string(),
// },
// );
// }
Ok(())
}
async fn install_app_from_path(&self, path: impl AsRef<Path>) -> Result<()> {
let app_path = path.as_ref();
let manifest_path = app_path.join("manifest.yml");
let manifest: AppManifest = {
let file = File::open(&manifest_path)?;
serde_yaml::from_reader(file).with_context(|| {
format!(
"Could not parse config file from {}",
manifest_path.display()
)
})?
};
println!("Manifest: {:?}", manifest);
todo!()
}
}

@@ -1,3 +0,0 @@
use crate::AppState;
impl AppState {}

@@ -1,77 +0,0 @@
use std::collections::HashMap;
use anyhow::Result;
use cozo::ScriptMutability;
use serde_json::Value;
use crate::AppState;
use super::utils::data_value_to_json_value;
impl AppState {
pub async fn export(&self) -> Result<Value> {
let result = self.db.run_script(
"::relations",
Default::default(),
ScriptMutability::Immutable,
)?;
let name_index = result.headers.iter().position(|x| x == "name").unwrap();
let relation_names = result
.rows
.into_iter()
.map(|row| row[name_index].get_str().unwrap().to_owned())
.collect::<Vec<_>>();
let mut relation_columns = HashMap::new();
for relation_name in relation_names.iter() {
let result = self.db.run_script(
&format!("::columns {relation_name}"),
Default::default(),
ScriptMutability::Immutable,
)?;
let column_index =
result.headers.iter().position(|x| x == "column").unwrap();
let columns = result
.rows
.into_iter()
.map(|row| row[column_index].get_str().unwrap().to_owned())
.collect::<Vec<_>>();
relation_columns.insert(relation_name.clone(), columns);
}
let tx = self.db.multi_transaction(false);
let mut all_relations = hmap! {};
for relation_name in relation_names.iter() {
if relation_name.contains(":") {
continue;
}
let mut relation_info = vec![];
let columns = relation_columns.get(relation_name.as_str()).unwrap();
let columns_str = columns.join(", ");
let query =
format!("?[{columns_str}] := *{relation_name} {{ {columns_str} }}");
let result = tx.run_script(&query, Default::default())?;
for row in result.rows.into_iter() {
let mut object = hmap! {};
row.into_iter().enumerate().for_each(|(idx, col)| {
object
.insert(columns[idx].to_owned(), data_value_to_json_value(&col));
});
relation_info.push(object);
}
all_relations.insert(relation_name.to_owned(), relation_info);
}
Ok(json!({"relations": all_relations}))
}
}

@@ -1,56 +0,0 @@
use std::str::FromStr;
use anyhow::Result;
use chrono::Local;
use uuid::Uuid;
use crate::{AppState, NodeId};
use super::node::CreateOrUpdate;
impl AppState {
pub async fn get_todays_journal_id(&self) -> Result<NodeId> {
let today = todays_date();
let result = self.db.run_script(
"
?[node_id] := *journal_day{day, node_id}, day = $day
",
btmap! {
"day".to_owned() => today.clone().into(),
},
ScriptMutability::Immutable,
)?;
// TODO: Do this check on the server side
if result.rows.len() == 0 {
// Insert a new one
// let uuid = Uuid::now_v7();
// let node_id = uuid.to_string();
let node_info = self
.create_or_update_node(
CreateOrUpdate::Create {
r#type: "panorama/journal/page".to_owned(),
},
Some(btmap! {
"panorama/journal/page/day".to_owned() => today.clone().into(),
"panorama/journal/page/content".to_owned() => "".to_owned().into(),
"panorama/journal/page/title".to_owned() => today.clone().into(),
}),
)
.await?;
return Ok(node_info.node_id);
}
let node_id = result.rows[0][0].get_str().unwrap();
Ok(NodeId(Uuid::from_str(node_id).into_diagnostic()?))
}
}
fn todays_date() -> String {
let now = Local::now();
let date = now.date_naive();
date.format("%Y-%m-%d").to_string()
}

@@ -1,47 +0,0 @@
use std::{collections::HashMap, str::FromStr, time::Duration};
use anyhow::Result;
use cozo::{DataValue, JsonData, ScriptMutability};
use futures::TryStreamExt;
use tokio::{net::TcpStream, time::sleep};
use uuid::Uuid;
use crate::{AppState, NodeId};
#[derive(Debug, Serialize)]
pub struct MailConfig {
pub node_id: NodeId,
pub imap_hostname: String,
pub imap_port: u16,
pub imap_username: String,
pub imap_password: String,
}
impl AppState {
/// Fetch the list of mail configs in the database
pub fn fetch_mail_configs(&self) -> Result<Vec<MailConfig>> {
let result = self.db.run_script(
"
?[node_id, imap_hostname, imap_port, imap_username, imap_password] :=
*node{ id: node_id },
*mail_config{ node_id, imap_hostname, imap_port, imap_username, imap_password }
",
Default::default(),
ScriptMutability::Immutable,
)?;
let result = result
.rows
.into_iter()
.map(|row| MailConfig {
node_id: NodeId(Uuid::from_str(row[0].get_str().unwrap()).unwrap()),
imap_hostname: row[1].get_str().unwrap().to_owned(),
imap_port: row[2].get_int().unwrap() as u16,
imap_username: row[3].get_str().unwrap().to_owned(),
imap_password: row[4].get_str().unwrap().to_owned(),
})
.collect::<Vec<_>>();
Ok(result)
}
}

@@ -1,117 +0,0 @@
// pub mod apps;
// pub mod codetrack;
// pub mod export;
// pub mod journal;
// pub mod mail;
pub mod appsv0;
pub mod node;
pub mod node_raw;
// pub mod utils;
use std::{collections::HashMap, fs, path::Path};
use anyhow::{Context, Result};
use bimap::BiMap;
use sqlx::{
pool::PoolConnection,
sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions},
Sqlite, SqlitePool,
};
use tantivy::{
directory::MmapDirectory,
schema::{Field, Schema, STORED, STRING, TEXT},
Index,
};
use wasmtime::Module;
use crate::{
// mail::MailWorker,
migrations::MIGRATOR,
};
pub fn tantivy_schema() -> (Schema, BiMap<String, Field>) {
let mut schema_builder = Schema::builder();
let mut field_map = BiMap::new();
let node_id = schema_builder.add_text_field("node_id", STRING | STORED);
field_map.insert("node_id".to_owned(), node_id);
let journal_content = schema_builder.add_text_field("title", TEXT | STORED);
field_map.insert("panorama/journal/page/content".to_owned(), journal_content);
(schema_builder.build(), field_map)
}
#[derive(Clone)]
pub struct AppState {
pub db: SqlitePool,
pub tantivy_index: Index,
pub tantivy_field_map: BiMap<String, Field>,
pub app_wasm_modules: HashMap<String, Module>,
// TODO: Compile this into a more efficient thing than just iter
pub app_routes: HashMap<String, Vec<AppRoute>>,
}
#[derive(Clone)]
pub struct AppRoute {
route: String,
handler_name: String,
}
impl AppState {
pub async fn new(panorama_dir: impl AsRef<Path>) -> Result<Self> {
let panorama_dir = panorama_dir.as_ref().to_path_buf();
fs::create_dir_all(&panorama_dir)
.context("Could not create panorama directory")?;
println!("Panorama dir: {}", panorama_dir.display());
let (tantivy_index, tantivy_field_map) = {
let (schema, field_map) = tantivy_schema();
let tantivy_path = panorama_dir.join("tantivy-index");
fs::create_dir_all(&tantivy_path)?;
let dir = MmapDirectory::open(&tantivy_path)?;
let index = Index::builder().schema(schema).open_or_create(dir)?;
(index, field_map)
};
let db_path = panorama_dir.join("db.sqlite");
let sqlite_connect_options = SqliteConnectOptions::new()
.filename(db_path)
.journal_mode(SqliteJournalMode::Wal)
.create_if_missing(true);
let db = SqlitePoolOptions::new()
.connect_with(sqlite_connect_options)
.await
.context("Could not connect to SQLite database")?;
let state = AppState {
db,
tantivy_index,
tantivy_field_map,
app_wasm_modules: Default::default(),
app_routes: Default::default(),
};
state.init().await?;
Ok(state)
}
pub async fn conn(&self) -> Result<PoolConnection<Sqlite>> {
self.db.acquire().await.map_err(|err| err.into())
}
async fn init(&self) -> Result<()> {
// run_migrations(&self.db).await?;
MIGRATOR
.run(&self.db)
.await
.context("Could not migrate database")?;
Ok(())
}
pub fn handle_app_route() {}
}

@@ -1,523 +0,0 @@
use std::collections::{BTreeMap, HashMap};
use anyhow::Result;
use chrono::{DateTime, Utc};
use itertools::Itertools;
use serde_json::Value;
use sqlx::{Connection, Executor, FromRow, QueryBuilder, Sqlite};
use uuid::Uuid;
use crate::{state::node_raw::FieldMappingRow, AppState, NodeId};
// use super::utils::owned_value_to_json_value;
pub type ExtraData = BTreeMap<String, Value>;
pub type FieldsByTable<'a> =
HashMap<(&'a i64, &'a String), Vec<&'a FieldMappingRow>>;
#[derive(Debug)]
pub struct NodeInfo {
pub node_id: NodeId,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
pub fields: Option<HashMap<String, Value>>,
}
impl AppState {
/// Get all properties of a node
pub async fn get_node(&self, node_id: impl AsRef<str>) -> Result<NodeInfo> {
let node_id = node_id.as_ref().to_owned();
let mut conn = self.conn().await?;
conn
.transaction::<_, _, sqlx::Error>(|tx| {
Box::pin(async move {
let node_id = node_id.clone();
let field_mapping =
AppState::get_related_field_list_for_node_id(&mut **tx, &node_id)
.await?;
// Group the keys by which relation they're in
let fields_by_table = field_mapping.iter().into_group_map_by(
|FieldMappingRow {
app_id,
app_table_name,
..
}| (app_id, app_table_name),
);
// Run the query that grabs all of the relevant fields, and coalesce
// the fields back
let related_fields =
AppState::query_related_fields(&mut **tx, &fields_by_table).await?;
println!("Related fields: {:?}", related_fields);
// let created_at = DateTime::from_timestamp_millis(
// (result.rows[0][2].get_float().unwrap() * 1000.0) as i64,
// )
// .unwrap();
// let updated_at = DateTime::from_timestamp_millis(
// (result.rows[0][3].get_float().unwrap() * 1000.0) as i64,
// )
// .unwrap();
// let mut fields = HashMap::new();
// for row in result
// .rows
// .into_iter()
// .map(|row| row.into_iter().skip(4).zip(all_fields.iter()))
// {
// for (value, (_, _, field_name)) in row {
// fields.insert(
// field_name.to_string(),
// data_value_to_json_value(&value),
// );
// }
// }
todo!()
// Ok(NodeInfo {
// node_id: NodeId(Uuid::from_str(&node_id).unwrap()),
// created_at,
// updated_at,
// fields: Some(fields),
// })
})
})
.await?;
todo!()
// Ok(())
}
async fn query_related_fields<'e, 'c: 'e, X>(
x: X,
fields_by_table: &FieldsByTable<'_>,
) -> sqlx::Result<HashMap<String, Value>>
where
X: 'e + Executor<'c, Database = Sqlite>,
{
let mut query = QueryBuilder::new("");
let mut mapping = HashMap::new();
let mut ctr = 0;
let mut selected_fields = vec![];
for ((app_id, app_table_name), fields) in fields_by_table.iter() {
let table_gen_name = format!("c{ctr}");
ctr += 1;
let mut keys = vec![];
for field_info in fields.iter() {
let field_gen_name = format!("f{ctr}");
ctr += 1;
mapping.insert(&field_info.full_key, field_gen_name.clone());
keys.push(field_gen_name.clone());
selected_fields.push(format!(
"{}.{} as {}",
table_gen_name, field_info.app_table_field, field_gen_name
));
// constraints.push(format!(
// "{}: {}",
// field_info.relation_field.to_owned(),
// field_gen_name,
// ));
// all_fields.push((
// field_gen_name,
// field_info.relation_field.to_owned(),
// key,
// ))
}
// let keys = keys.join(", ");
// let constraints = constraints.join(", ");
// all_relation_queries.push(format!(
// "
// {table_gen_name}[{keys}] :=
// *{relation}{{ node_id, {constraints} }},
// node_id = $node_id
// "
// ));
// all_relation_constraints.push(format!("{table_gen_name}[{keys}],"))
}
if selected_fields.is_empty() {
return Ok(HashMap::new());
}
query.push("SELECT ");
query.push(selected_fields.join(", "));
query.push(" FROM ");
println!("Query: {:?}", query.sql());
// let all_relation_constraints = all_relation_constraints.join("\n");
// let all_relation_queries = all_relation_queries.join("\n\n");
// let all_field_names = all_fields
// .iter()
// .map(|(field_name, _, _)| field_name)
// .join(", ");
// let _query = format!(
// "
// {all_relation_queries}
// ?[type, extra_data, created_at, updated_at, {all_field_names}] :=
// *node {{ id, type, created_at, updated_at, extra_data }},
// {all_relation_constraints}
// id = $node_id
// "
// );
let rows = query.build().fetch_all(x).await;
todo!()
}
}
#[derive(Debug)]
pub enum CreateOrUpdate {
Create { r#type: String },
Update { node_id: NodeId },
}
impl AppState {
// TODO: Split this out into create and update
pub async fn create_or_update_node(
&self,
opts: CreateOrUpdate,
extra_data: Option<ExtraData>,
) -> Result<NodeInfo> {
let node_id = match opts {
CreateOrUpdate::Create { .. } => NodeId(Uuid::now_v7()),
CreateOrUpdate::Update { ref node_id } => node_id.clone(),
};
let node_id = node_id.to_string();
let action = match opts {
CreateOrUpdate::Create { .. } => "put",
CreateOrUpdate::Update { .. } => "update",
};
println!("Request: {opts:?} {extra_data:?}");
let mut conn = self.conn().await?;
conn
.transaction::<_, _, sqlx::Error>(|tx| {
Box::pin(async move {
let node_info = match opts {
CreateOrUpdate::Create { r#type } => {
AppState::create_node_raw(&mut **tx, &r#type).await?
}
CreateOrUpdate::Update { node_id } => todo!(),
};
if let Some(extra_data) = extra_data {
if !extra_data.is_empty() {
let node_id_str = node_id.to_string();
let field_mapping = AppState::get_related_field_list_for_node_id(
&mut **tx,
&node_id_str,
)
.await?;
// Group the keys by which relation they're in
let fields_by_table = field_mapping.iter().into_group_map_by(
|FieldMappingRow {
app_id,
app_table_name,
..
}| (app_id, app_table_name),
);
AppState::write_extra_data(
&mut **tx,
&node_id_str,
&fields_by_table,
extra_data,
)
.await?;
}
}
Ok(node_info)
})
})
.await
.map_err(|err| err.into())
}
async fn create_node_raw<'e, 'c: 'e, X>(
x: X,
r#type: &str,
) -> sqlx::Result<NodeInfo>
where
X: 'e + Executor<'c, Database = Sqlite>,
{
let node_id = Uuid::now_v7();
let node_id_str = node_id.to_string();
#[derive(FromRow)]
struct Result {
updated_at: i64,
}
let result = sqlx::query_as!(
Result,
r#"
INSERT INTO node (node_id, node_type, extra_data)
VALUES (?, ?, "{}")
RETURNING updated_at
"#,
node_id_str,
r#type,
)
.fetch_one(x)
.await?;
let updated_at =
DateTime::from_timestamp_millis(result.updated_at * 1000).unwrap();
let created_at = DateTime::from_timestamp_millis(
node_id.get_timestamp().unwrap().to_unix().0 as i64 * 1000,
)
.unwrap();
Ok(NodeInfo {
node_id: NodeId(node_id),
created_at,
updated_at,
fields: None,
})
}
async fn write_extra_data<'e, 'c: 'e, X>(
x: X,
node_id: &str,
fields_by_table: &FieldsByTable<'_>,
extra_data: ExtraData,
) -> sqlx::Result<()>
where
X: 'e + Executor<'c, Database = Sqlite>,
{
// Update Tantivy indexes
// for ((app_id, app_table_name), fields) in fields_by_table.iter() {
// let mut writer =
// self.tantivy_index.writer(15_000_000).into_diagnostic()?;
// let delete_term = Term::from_field_text(node_id_field.clone(), &node_id);
// writer.delete_term(delete_term);
// writer.add_document(doc).into_diagnostic()?;
// writer.commit().into_diagnostic()?;
// drop(writer);
// }
// Update database
let mut node_has_keys = Vec::new();
println!("Fields by table: {:?}", fields_by_table);
for ((app_id, app_table_name), fields) in fields_by_table.iter() {
for field_info in fields {
node_has_keys.push(&field_info.full_key);
}
// let mut doc =
// btmap! { node_id_field.clone() => OwnedValue::Str(node_id.to_owned()) };
// let fields_mapping = fields
// .into_iter()
// .map(
// |(
// key,
// FieldInfo {
// relation_field,
// r#type,
// is_fts_enabled,
// ..
// },
// )| {
// let new_value = extra_data.get(*key).unwrap();
// // TODO: Make this more generic
// let new_value = match r#type.as_str() {
// "int" => DataValue::from(new_value.as_i64().unwrap()),
// _ => DataValue::from(new_value.as_str().unwrap()),
// };
// if *is_fts_enabled {
// if let Some(field) = self.tantivy_field_map.get_by_left(*key) {
// doc.insert(
// field.clone(),
// OwnedValue::Str(new_value.get_str().unwrap().to_owned()),
// );
// }
// }
// (relation_field.to_owned(), new_value)
// },
// )
// .collect::<BTreeMap<_, _>>();
// let keys = fields_mapping.keys().collect::<Vec<_>>();
// let keys_joined = keys.iter().join(", ");
// if !keys.is_empty() {
// let query = format!(
// "
// ?[ node_id, {keys_joined} ] <- [$input_data]
// :{action} {relation} {{ node_id, {keys_joined} }}
// "
// );
// let mut params = vec![];
// params.push(DataValue::from(node_id.clone()));
// for key in keys {
// params.push(fields_mapping[key].clone());
// }
// let result = tx.run_script(
// &query,
// btmap! {
// "input_data".to_owned() => DataValue::List(params),
// },
// );
// }
}
if !node_has_keys.is_empty() {
let mut query =
QueryBuilder::new("INSERT INTO node_has_key (node_id, full_key) ");
query.push_values(node_has_keys, |mut b, key| {
b.push_bind(node_id).push_bind(key);
});
println!("Query: {:?}", query.sql());
query.build().execute(x).await?;
}
Ok(())
}
}
// impl AppState {
// pub async fn update_node() {}
// pub async fn search_nodes(
// &self,
// query: impl AsRef<str>,
// ) -> Result<Vec<(NodeId, Value)>> {
// let query = query.as_ref();
// let reader = self.tantivy_index.reader().into_diagnostic()?;
// let searcher = reader.searcher();
// let node_id_field = self
// .tantivy_field_map
// .get_by_left("node_id")
// .unwrap()
// .clone();
// let journal_page_field = self
// .tantivy_field_map
// .get_by_left("panorama/journal/page/content")
// .unwrap()
// .clone();
// let mut query_parser =
// QueryParser::for_index(&self.tantivy_index, vec![journal_page_field]);
// query_parser.set_field_fuzzy(journal_page_field, true, 2, true);
// let query = query_parser.parse_query(query).into_diagnostic()?;
// let top_docs = searcher
// .search(&query, &TopDocs::with_limit(10))
// .into_diagnostic()?;
// Ok(
// top_docs
// .into_iter()
// .map(|(score, doc_address)| {
// let retrieved_doc =
// searcher.doc::<TantivyDocument>(doc_address).unwrap();
// let node_id = retrieved_doc
// .get_first(node_id_field.clone())
// .unwrap()
// .as_str()
// .unwrap();
// let all_fields = retrieved_doc.get_sorted_field_values();
// let node_id = NodeId(Uuid::from_str(node_id).unwrap());
// let fields = all_fields
// .into_iter()
// .map(|(field, values)| {
// (
// self.tantivy_field_map.get_by_right(&field).unwrap(),
// if values.len() == 1 {
// owned_value_to_json_value(values[0])
// } else {
// Value::Array(
// values
// .into_iter()
// .map(owned_value_to_json_value)
// .collect_vec(),
// )
// },
// )
// })
// .collect::<HashMap<_, _>>();
// (
// node_id,
// json!({
// "score": score,
// "fields": fields,
// }),
// )
// })
// .collect::<Vec<_>>(),
// )
// }
// fn get_rows_for_extra_keys(
// &self,
// tx: &MultiTransaction,
// keys: &[String],
// ) -> Result<FieldMapping> {
// let result = tx.run_script(
// "
// ?[key, relation, field_name, type, is_fts_enabled] :=
// *fqkey_to_dbkey{key, relation, field_name, type, is_fts_enabled},
// is_in(key, $keys)
// ",
// btmap! {
// "keys".to_owned() => DataValue::List(
// keys.into_iter()
// .map(|s| DataValue::from(s.as_str()))
// .collect::<Vec<_>>()
// ),
// },
// )?;
// AppState::rows_to_field_mapping(result)
// }
// fn rows_to_field_mapping(result: NamedRows) -> Result<FieldMapping> {
// let s = |s: &DataValue| s.get_str().unwrap().to_owned();
// Ok(
// result
// .rows
// .into_iter()
// .map(|row| {
// (
// s(&row[0]),
// FieldInfo {
// relation_name: s(&row[1]),
// relation_field: s(&row[2]),
// r#type: s(&row[3]),
// is_fts_enabled: row[4].get_bool().unwrap(),
// },
// )
// })
// .collect::<HashMap<_, _>>(),
// )
// }
// }

@@ -1,42 +0,0 @@
use sqlx::{Executor, FromRow, Sqlite};
use crate::AppState;
#[derive(Debug, FromRow)]
pub struct FieldMappingRow {
pub full_key: String,
pub app_id: i64,
pub app_table_name: String,
pub app_table_field: String,
pub db_table_name: Option<String>,
}
impl AppState {
pub(crate) async fn get_related_field_list_for_node_id<'e, 'c: 'e, X>(
x: X,
node_id: &str,
) -> sqlx::Result<Vec<FieldMappingRow>>
where
X: 'e + Executor<'c, Database = Sqlite>,
{
sqlx::query_as!(
FieldMappingRow,
"
SELECT
node_has_key.full_key, key_mapping.app_id,
key_mapping.app_table_name, app_table_field,
app_table_mapping.db_table_name
FROM node_has_key
INNER JOIN key_mapping
ON node_has_key.full_key = key_mapping.full_key
INNER JOIN app_table_mapping
ON key_mapping.app_id = app_table_mapping.app_id
AND key_mapping.app_table_name = app_table_mapping.app_table_name
WHERE node_id = $1
",
node_id
)
.fetch_all(x)
.await
}
}

@@ -1,59 +0,0 @@
use itertools::Itertools;
use serde_json::{Number, Value};
use tantivy::schema::OwnedValue;
pub fn owned_value_to_json_value(data_value: &OwnedValue) -> Value {
match data_value {
OwnedValue::Null => Value::Null,
OwnedValue::Str(s) => Value::String(s.to_string()),
OwnedValue::U64(u) => Value::Number(Number::from(*u)),
OwnedValue::I64(i) => Value::Number(Number::from(*i)),
OwnedValue::F64(f) => Value::Number(Number::from_f64(*f).unwrap()),
OwnedValue::Bool(b) => Value::Bool(*b),
OwnedValue::Array(a) => {
Value::Array(a.into_iter().map(owned_value_to_json_value).collect_vec())
}
OwnedValue::Object(o) => Value::Object(
o.into_iter()
.map(|(k, v)| (k.to_owned(), owned_value_to_json_value(v)))
.collect(),
),
_ => {
println!("Converting unknown {:?}", data_value);
serde_json::to_value(data_value).unwrap()
} // OwnedValue::Date(_) => todo!(),
// OwnedValue::Facet(_) => todo!(),
// OwnedValue::Bytes(_) => todo!(),
// OwnedValue::IpAddr(_) => todo!(),
// OwnedValue::PreTokStr(_) => todo!(),
}
}
// pub fn data_value_to_json_value(data_value: &DataValue) -> Value {
// match data_value {
// DataValue::Null => Value::Null,
// DataValue::Bool(b) => Value::Bool(*b),
// DataValue::Num(n) => Value::Number(match n {
// Num::Int(i) => Number::from(*i),
// Num::Float(f) => Number::from_f64(*f).unwrap(),
// }),
// DataValue::Str(s) => Value::String(s.to_string()),
// DataValue::List(v) => {
// Value::Array(v.into_iter().map(data_value_to_json_value).collect_vec())
// }
// DataValue::Json(v) => v.0.clone(),
// DataValue::Bytes(s) => {
// Value::String(String::from_utf8_lossy(s).to_string())
// }
// _ => {
// println!("Converting unknown {:?}", data_value);
// serde_json::to_value(data_value).unwrap()
// } // DataValue::Bytes(s) => todo!(),
// // DataValue::Uuid(_) => todo!(),
// // DataValue::Regex(_) => todo!(),
// // DataValue::Set(_) => todo!(),
// // DataValue::Vec(_) => todo!(),
// // DataValue::Validity(_) => todo!(),
// // DataValue::Bot => todo!(),
// }
// }

@@ -1,87 +0,0 @@
use anyhow::Result;
use sqlx::SqlitePool;
use tantivy::Index;
use crate::{
migrations::MIGRATOR,
state::{node::CreateOrUpdate, tantivy_schema},
AppState,
};
pub async fn test_state() -> Result<AppState> {
let db = SqlitePool::connect(":memory:").await?;
let (schema, tantivy_field_map) = tantivy_schema();
let tantivy_index = Index::create_in_ram(schema);
MIGRATOR.run(&db).await?;
let state = AppState {
db,
tantivy_index,
tantivy_field_map,
app_routes: Default::default(),
app_wasm_modules: Default::default(),
};
Ok(state)
}
#[tokio::test]
pub async fn test_create_node() -> Result<()> {
let state = test_state().await?;
let node_info = state
.create_or_update_node(
CreateOrUpdate::Create {
r#type: "panorama/journal/page".to_string(),
},
Some(btmap! {
"panorama/journal/page/content".to_owned() => json!("helloge"),
}),
)
.await?;
let mut node = state.get_node(node_info.node_id.to_string()).await?;
assert!(node.fields.is_some());
let fields = node.fields.take().unwrap();
assert!(fields.contains_key("panorama/journal/page/content"));
Ok(())
}
#[tokio::test]
pub async fn test_full_text_search() -> Result<()> {
let state = test_state().await?;
let node_info = state
.create_or_update_node(
CreateOrUpdate::Create {
r#type: "panorama/journal/page".to_string(),
},
Some(btmap! {
"panorama/journal/page/content".to_owned() => json!("Hello, world!"),
}),
)
.await?;
todo!();
// let results = state.search_nodes("world").await?;
// assert!(results
// .into_iter()
// .map(|entry| entry.0)
// .contains(&node_info.node_id));
Ok(())
}
#[tokio::test]
pub async fn test_install_apps() -> Result<()> {
let state = test_state().await?;
state.install_apps_from_search_paths().await?;
todo!();
Ok(())
}

@@ -1 +0,0 @@
export

@@ -1,41 +0,0 @@
[package]
name = "panorama-daemon"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.86"
axum = "0.7.5"
chrono = { version = "0.4.38", features = ["serde"] }
clap = { version = "4.5.7", features = ["derive"] }
# cozo = { version = "0.7.6", features = ["storage-rocksdb"] }
csv = "1.3.0"
dirs = "5.0.1"
futures = "0.3.30"
itertools = "0.13.0"
panorama-core = { path = "../panorama-core" }
schemars = "0.8.21"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = "1.0.117"
sugars = "3.0.1"
tantivy = { version = "0.22.0", features = ["zstd"] }
tokio = { version = "1.37.0", features = ["full"] }
tower = "0.4.13"
tower-http = { version = "0.5.2", features = ["cors", "trace"] }
tracing-subscriber = "0.3.18"
uuid = { version = "1.8.0", features = ["v7"] }
[dependencies.utoipa]
git = "https://github.com/juhaku/utoipa"
features = ["axum_extras", "time", "uuid", "chrono", "yaml"]
[dependencies.utoipa-scalar]
git = "https://github.com/juhaku/utoipa"
features = ["axum"]
[dependencies.async-imap]
version = "0.9.7"
default-features = false
features = ["runtime-tokio"]

@@ -1,15 +0,0 @@
use axum::{
routing::{method_routing, MethodFilter},
Router,
};
use panorama_core::AppState;
use utoipa::OpenApi;
#[derive(OpenApi)]
#[openapi(paths(), components(schemas()))]
pub(super) struct AppsApi;
pub(super) fn router() -> Router<AppState> {
Router::new()
// .route("/app/:id/*path", method_routing::any(handler))
}

@@ -1,34 +0,0 @@
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
pub type AppResult<T, E = AppError> = std::result::Result<T, E>;
// Make our own error that wraps `anyhow::Error`.
#[derive(Debug)]
pub struct AppError(anyhow::Error);
// Tell axum how to convert `AppError` into a response.
impl IntoResponse for AppError {
fn into_response(self) -> Response {
eprintln!("Encountered error: {}", self.0);
eprintln!("{:?}", self.0);
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Something went wrong: {}", self.0),
)
.into_response()
}
}
// This enables using `?` on functions that return `Result<_, anyhow::Error>` to turn them into
// `Result<_, AppError>`. That way you don't need to do that manually.
impl<E> From<E> for AppError
where
E: Into<anyhow::Error>,
{
fn from(err: E) -> Self {
Self(err.into())
}
}

@@ -1,31 +0,0 @@
use axum::Router;
use utoipa::OpenApi;
use crate::AppState;
/// Node API
#[derive(OpenApi)]
#[openapi(paths(), components(schemas()))]
pub(super) struct JournalApi;
pub(super) fn router() -> Router<AppState> {
Router::new()
// .route("/get_todays_journal_id", get(get_todays_journal_id))
}
// #[utoipa::path(
// get,
// path = "/get_todays_journal_id",
// responses(
// (status = 200),
// ),
// )]
// pub async fn get_todays_journal_id(
// State(state): State<AppState>,
// ) -> AppResult<Json<Value>> {
// let node_id = state.get_todays_journal_id().await?;
// Ok(Json(json!({
// "node_id": node_id.to_string(),
// })))
// }

@@ -1,73 +0,0 @@
#[macro_use]
extern crate anyhow;
#[macro_use]
extern crate serde;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate sugars;
pub mod apps;
mod error;
mod journal;
pub mod mail;
mod node;
use std::fs;
use anyhow::Result;
use axum::{http::Method, routing::get, Router};
use panorama_core::AppState;
use tokio::net::TcpListener;
use tower::ServiceBuilder;
use tower_http::{
cors::{self, CorsLayer},
trace::TraceLayer,
};
use utoipa::OpenApi;
use utoipa_scalar::{Scalar, Servable};
pub async fn run() -> Result<()> {
#[derive(OpenApi)]
#[openapi(
modifiers(),
nest(
(path = "/journal", api = crate::journal::JournalApi),
(path = "/node", api = crate::node::NodeApi),
),
)]
struct ApiDoc;
let data_dir = dirs::data_dir().unwrap();
let panorama_dir = data_dir.join("panorama");
fs::create_dir_all(&panorama_dir)?;
let state = AppState::new(&panorama_dir).await?;
state.install_apps_from_search_paths().await?;
let cors_layer = CorsLayer::new()
.allow_methods([Method::GET, Method::POST, Method::PUT])
.allow_headers(cors::Any)
.allow_origin(cors::Any);
let trace_layer = TraceLayer::new_for_http();
// build our application with a single route
let app = Router::new()
.merge(Scalar::with_url("/api/docs", ApiDoc::openapi()))
.route("/", get(|| async { "Hello, World!" }))
.nest("/node", node::router().with_state(state.clone()))
.nest("/journal", journal::router().with_state(state.clone()))
// .route("/mail/config", get(get_mail_config))
// .route("/mail", get(get_mail))
.layer(ServiceBuilder::new().layer(cors_layer))
.layer(ServiceBuilder::new().layer(trace_layer))
.with_state(state.clone());
let listener = TcpListener::bind("0.0.0.0:5195").await?;
println!("Listening... {:?}", listener);
axum::serve(listener, app).await?;
Ok(())
}

@@ -1,47 +0,0 @@
// pub async fn get_mail_config(
// State(state): State<AppState>,
// ) -> AppResult<Json<Value>> {
// let configs = state.fetch_mail_configs()?;
// Ok(Json(json!({ "configs": configs })))
// }
// pub async fn get_mail(State(state): State<AppState>) -> AppResult<Json<Value>> {
// let mailboxes = state.db.run_script("
// ?[node_id, account_node_id, mailbox_name] := *mailbox {node_id, account_node_id, mailbox_name}
// ", Default::default(), ScriptMutability::Immutable)?;
// let mailboxes = mailboxes
// .rows
// .iter()
// .map(|mb| {
// json!({
// "node_id": mb[0].get_str().unwrap(),
// "account_node_id": mb[1].get_str().unwrap(),
// "mailbox_name": mb[2].get_str().unwrap(),
// })
// })
// .collect::<Vec<_>>();
// let messages = state.db.run_script("
// ?[node_id, subject, body, internal_date] := *message {node_id, subject, body, internal_date}
// :limit 10
// ", Default::default(), ScriptMutability::Immutable)?;
// let messages = messages
// .rows
// .iter()
// .map(|m| {
// json!({
// "node_id": m[0].get_str().unwrap(),
// "subject": m[1].get_str().unwrap(),
// "body": m[2].get_str(),
// "internal_date": m[3].get_str().unwrap(),
// })
// })
// .collect::<Vec<_>>();
// Ok(Json(json!({
// "mailboxes": mailboxes,
// "messages": messages,
// })))
// }

@@ -1,32 +0,0 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use panorama_core::state::appsv0::manifest::AppManifest;
use schemars::schema_for;
#[derive(Debug, Parser)]
struct Opt {
#[clap(subcommand)]
command: Option<Command>,
}
#[derive(Debug, Subcommand)]
enum Command {
GenerateConfigSchema,
}
#[tokio::main]
async fn main() -> Result<()> {
let opt = Opt::parse();
tracing_subscriber::fmt::init();
match opt.command {
Some(Command::GenerateConfigSchema) => {
let schema = schema_for!(AppManifest);
println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
None => panorama_daemon::run().await?,
}
Ok(())
}

@@ -1,173 +0,0 @@
use axum::Router;
use utoipa::OpenApi;
use crate::AppState;
/// Node API
#[derive(OpenApi)]
#[openapi(paths(), components(schemas()))]
pub(super) struct NodeApi;
pub(super) fn router() -> Router<AppState> {
Router::new()
// .route("/", put(create_node))
// .route("/:id", get(get_node))
// .route("/:id", post(update_node))
// .route("/search", get(search_nodes))
}
// #[derive(Serialize, Deserialize, ToSchema, Clone)]
// struct GetNodeResult {
// node_id: String,
// fields: HashMap<String, Value>,
// created_at: DateTime<Utc>,
// updated_at: DateTime<Utc>,
// }
// /// Get node info
// ///
// /// This endpoint retrieves all the fields for a particular node
// #[utoipa::path(
// get,
// path = "/{id}",
// responses(
// (status = 200, body = [GetNodeResult]),
// (status = 404, description = "the node ID provided was not found")
// ),
// params(
// ("id" = String, Path, description = "Node ID"),
// ),
// )]
// pub async fn get_node(
// State(state): State<AppState>,
// Path(node_id): Path<String>,
// ) -> AppResult<(StatusCode, Json<Value>)> {
// let node_info = state.get_node(&node_id).await?;
// Ok((
// StatusCode::OK,
// Json(json!({
// "node_id": node_id,
// "fields": node_info.fields,
// "created_at": node_info.created_at,
// "updated_at": node_info.updated_at,
// })),
// ))
// }
// #[derive(Deserialize, Debug)]
// pub struct UpdateData {
// extra_data: Option<ExtraData>,
// }
// /// Update node info
// #[utoipa::path(
// post,
// path = "/{id}",
// responses(
// (status = 200)
// ),
// params(
// ("id" = String, Path, description = "Node ID"),
// )
// )]
// pub async fn update_node(
// State(state): State<AppState>,
// Path(node_id): Path<String>,
// Json(opts): Json<UpdateData>,
// ) -> AppResult<Json<Value>> {
// let node_id = NodeId(Uuid::from_str(&node_id).into_diagnostic()?);
// let node_info = state
// .create_or_update_node(CreateOrUpdate::Update { node_id }, opts.extra_data)
// .await?;
// Ok(Json(json!({
// "node_id": node_info.node_id.to_string(),
// })))
// }
// #[derive(Debug, Deserialize)]
// pub struct CreateNodeOpts {
// // TODO: Allow submitting a string
// // id: Option<String>,
// #[serde(rename = "type")]
// ty: String,
// extra_data: Option<ExtraData>,
// }
// #[utoipa::path(
// put,
// path = "/",
// responses((status = 200)),
// )]
// pub async fn create_node(
// State(state): State<AppState>,
// Json(opts): Json<CreateNodeOpts>,
// ) -> AppResult<Json<Value>> {
// let node_info = state
// .create_or_update_node(
// CreateOrUpdate::Create { r#type: opts.ty },
// opts.extra_data,
// )
// .await?;
// Ok(Json(json!({
// "node_id": node_info.node_id.to_string(),
// })))
// }
// #[derive(Deserialize)]
// pub struct SearchQuery {
// query: String,
// }
// #[utoipa::path(
// get,
// path = "/search",
// responses((status = 200)),
// )]
// pub async fn search_nodes(
// State(state): State<AppState>,
// Query(query): Query<SearchQuery>,
// ) -> AppResult<Json<Value>> {
// let search_result = state.search_nodes(query.query).await?;
// let search_result = search_result
// .into_iter()
// .map(|(id, value)| value["fields"].clone())
// .collect_vec();
// Ok(Json(json!({
// "results": search_result,
// })))
// }
// fn get_rows_for_extra_keys(
// tx: &MultiTransaction,
// extra_data: &ExtraData,
// ) -> AppResult<HashMap<String, (String, String, String)>> {
// let result = tx.run_script(
// "
// ?[key, relation, field_name, type] :=
// *fqkey_to_dbkey{key, relation, field_name, type},
// is_in(key, $keys)
// ",
// btmap! {
// "keys".to_owned() => DataValue::List(
// extra_data
// .keys()
// .map(|s| DataValue::from(s.as_str()))
// .collect::<Vec<_>>()
// ),
// },
// )?;
// let s = |s: &DataValue| s.get_str().unwrap().to_owned();
// Ok(
// result
// .rows
// .into_iter()
// .map(|row| (s(&row[0]), (s(&row[1]), s(&row[2]), s(&row[3]))))
// .collect::<HashMap<_, _>>(),
// )
// }

View file

@@ -1,9 +0,0 @@
[package]
name = "panorama-macros"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]

View file

@@ -1 +0,0 @@
// TODO: derive named rows

View file

@@ -1,6 +0,0 @@
[package]
name = "panorama-sync"
version = "0.1.0"
edition = "2021"
[dependencies]

View file

@@ -1 +0,0 @@

View file

@@ -22,6 +22,10 @@ export default defineConfig({
label: "Technical Docs",
autogenerate: { directory: "technical-docs" },
},
{
label: "Protocols",
autogenerate: { directory: "protocols" },
},
],
customCss: ["./node_modules/katex/dist/katex.min.css"],
}),

View file

@@ -0,0 +1,30 @@
---
title: Device
---
The panorama network keeps track of what devices join and leave the network.
Each device has certain attributes:
```ts
interface DeviceConfig {
// Not used for anything important, just for displaying an icon if needed
formFactor: "desktop" | "server" | "laptop" | "phone" | "tablet" | string;
// A string that represents a duration of time. If it has been longer than
// this amount of time since last contacting this device, consider it to have
// gone offline
heartbeatDuration: string;
// Whether or not to schedule services to this device
canRunServices: boolean;
// Whether or not this device should be treated as a file store
// (recommended to be off for phones)
canStoreFiles: boolean;
}
```
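As a rough sketch of how `heartbeatDuration` could be consumed (the `parseDuration` helper and the exact check below are assumptions for illustration, not part of the spec):
```ts
// Illustrative only: treat a peer as offline once more than its configured
// heartbeatDuration has elapsed since we last heard from it. parseDuration is
// an assumed helper that turns strings like "30s" or "5m" into milliseconds;
// the actual duration format is not pinned down in this doc.
const UNITS: Record<string, number> = { ms: 1, s: 1_000, m: 60_000, h: 3_600_000, d: 86_400_000 };

function parseDuration(s: string): number {
  const match = /^(\d+)(ms|s|m|h|d)$/.exec(s);
  if (!match) throw new Error(`invalid duration: ${s}`);
  return Number(match[1]) * UNITS[match[2]];
}

function isOffline(config: DeviceConfig, lastSeen: Date, now = new Date()): boolean {
  return now.getTime() - lastSeen.getTime() > parseDuration(config.heartbeatDuration);
}
```
In this sketch anything beyond the configured window counts as offline; the real daemon may well want grace periods or retries.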
Each device keeps track of every other device, with a merkle tree of signatures.
Devices have their own keypairs. TODO: see how Matrix does cross-signing.
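The signing scheme itself is still an open question (see the TODO above), but as a non-authoritative sketch, a signed record about a peer device might look roughly like this; every field name here is an assumption:
```ts
// Hypothetical shape only: how one device might record and sign what it knows
// about another device. The merkle-tree / cross-signing design is still TODO,
// so nothing here is final.
interface SignedDeviceEntry {
  deviceId: string;
  publicKey: Uint8Array;  // the peer's public key
  config: DeviceConfig;   // the attributes described above
  signature: Uint8Array;  // signature by the recording device's own keypair
}
```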

View file

@@ -0,0 +1,14 @@
---
title: Design Principles
---
## Architecture
- **Never use fully-qualified names starting from the domain (e.g. `com.example.package`).**
This makes migrating domains or package names very hard.
## Data governance
- **Offline first, full control to the user.**
Synchronization is an important feature but must be built as a separate thing.
This also means that it should be possible for some devices to stay offline for long periods of time.

View file

@@ -8,13 +8,8 @@ This is documentation for a feature that is in development.
Almost none of this is implemented and most of it will probably change in the future.
:::
This **only** deals with syncing nodes and files between devices owned by the same person. Permissions are not considered here.
## Node-level sync
## Design notes
## Attribute-level sync
- Devices need to have some kind of knowledge of each other's existence. This may not necessarily be exposed to apps, but the thing that's responsible for syncing needs to know which nodes have which files (a rough sketch of this bookkeeping follows below).
- Slow internet connections and largely offline usage patterns need to be considered.
- **TODO:** does this need to be deeply integrated within the panorama daemon itself, or is there a way to expose enough APIs for this to just be an app?
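A minimal sketch of the bookkeeping the first note implies, assuming a per-peer record keyed by device ID (all names here are illustrative, not a committed design):
```ts
// Illustrative only: what the sync component might track about each peer.
interface PeerKnowledge {
  deviceId: string;
  lastSeen: Date;               // pairs with heartbeatDuration from the Device doc
  knownNodeIds: Set<string>;    // nodes this peer is believed to hold
  knownFileHashes: Set<string>; // file blobs this peer is believed to store
}

// Consulted when deciding what to push or pull, even while some peers stay
// offline for long stretches.
type SyncState = Map<string, PeerKnowledge>; // keyed by deviceId
```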
## Index-level sync

View file

@@ -6,7 +6,7 @@ Types exist to ensure that apps are treating data properly.
## Formal definition
A node's type can be one of the following:
An attribute's type can be one of the following:
$\tau :\equiv$
@@ -25,7 +25,8 @@ the URL format will need to be decided upon by then.
## Notes
- All nodes must belong to _closed_ types.
- Nodes don't have types; only attributes do.
- All attributes must belong to _closed_ types.
This means type variables cannot exist at the top-level.
- When shown in the panorama UI, the constant type will not be shown as a separate type.
Instead the actual type itself will be inlined.

View file

@@ -0,0 +1,5 @@
---
title: Client-Bridge Protocols
---
A **bridge** is just a way of connecting two devices.

View file

@@ -0,0 +1,4 @@
---
title: Client-Client Protocols
---

View file

@@ -1,43 +0,0 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.pub-cache/
.pub/
/build/
# Symbolication related
app.*.symbols
# Obfuscation related
app.*.map.json
# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release

View file

@@ -1,45 +0,0 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: "761747bfc538b5af34aa0d3fac380f1bc331ec49"
channel: "stable"
project_type: app
# Tracks metadata for the flutter migrate command
migration:
platforms:
- platform: root
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: android
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: ios
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: linux
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: macos
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: web
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: windows
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
# User provided section
# List of Local paths (relative to this file) that should be
# ignored by the migrate tool.
#
# Files that are not part of the templates will be ignored by default.
unmanaged_files:
- 'lib/main.dart'
- 'ios/Runner.xcodeproj/project.pbxproj'

View file

@@ -1,16 +0,0 @@
# panorama
A new Flutter project.
## Getting Started
This project is a starting point for a Flutter application.
A few resources to get you started if this is your first Flutter project:
- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab)
- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook)
For help getting started with Flutter development, view the
[online documentation](https://docs.flutter.dev/), which offers tutorials,
samples, guidance on mobile development, and a full API reference.

View file

@@ -1,28 +0,0 @@
# This file configures the analyzer, which statically analyzes Dart code to
# check for errors, warnings, and lints.
#
# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
# invoked from the command line by running `flutter analyze`.
# The following line activates a set of recommended lints for Flutter apps,
# packages, and plugins designed to encourage good coding practices.
include: package:flutter_lints/flutter.yaml
linter:
# The lint rules applied to this project can be customized in the
# section below to disable rules from the `package:flutter_lints/flutter.yaml`
# included above or to enable additional rules. A list of all available lints
# and their documentation is published at https://dart.dev/lints.
#
# Instead of disabling a lint rule for the entire project in the
# section below, it can also be suppressed for a single line of code
# or a specific dart file by using the `// ignore: name_of_lint` and
# `// ignore_for_file: name_of_lint` syntax on the line or in the file
# producing the lint.
rules:
# avoid_print: false # Uncomment to disable the `avoid_print` rule
# prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

View file

@@ -1,13 +0,0 @@
gradle-wrapper.jar
/.gradle
/captures/
/gradlew
/gradlew.bat
/local.properties
GeneratedPluginRegistrant.java
# Remember to never publicly share your keystore.
# See https://flutter.dev/docs/deployment/android#reference-the-keystore-from-the-app
key.properties
**/*.keystore
**/*.jks

View file

@@ -1,58 +0,0 @@
plugins {
id "com.android.application"
id "kotlin-android"
// The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins.
id "dev.flutter.flutter-gradle-plugin"
}
def localProperties = new Properties()
def localPropertiesFile = rootProject.file("local.properties")
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader("UTF-8") { reader ->
localProperties.load(reader)
}
}
def flutterVersionCode = localProperties.getProperty("flutter.versionCode")
if (flutterVersionCode == null) {
flutterVersionCode = "1"
}
def flutterVersionName = localProperties.getProperty("flutter.versionName")
if (flutterVersionName == null) {
flutterVersionName = "1.0"
}
android {
namespace = "com.example.panorama"
compileSdk = flutter.compileSdkVersion
ndkVersion = flutter.ndkVersion
compileOptions {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId = "com.example.panorama"
// You can update the following values to match your application needs.
// For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-gradle-build-configuration.
minSdk = flutter.minSdkVersion
targetSdk = flutter.targetSdkVersion
versionCode = flutterVersionCode.toInteger()
versionName = flutterVersionName
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig = signingConfigs.debug
}
}
}
flutter {
source = "../.."
}

View file

@@ -1,7 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View file

@@ -1,45 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:label="panorama"
android:name="${applicationName}"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:taskAffinity=""
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data
android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme"
/>
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
<!-- Required to query activities that can process text, see:
https://developer.android.com/training/package-visibility and
https://developer.android.com/reference/android/content/Intent#ACTION_PROCESS_TEXT.
In particular, this is used by the Flutter engine in io.flutter.plugin.text.ProcessTextPlugin. -->
<queries>
<intent>
<action android:name="android.intent.action.PROCESS_TEXT"/>
<data android:mimeType="text/plain"/>
</intent>
</queries>
</manifest>

View file

@@ -1,5 +0,0 @@
package com.example.panorama
import io.flutter.embedding.android.FlutterActivity
class MainActivity: FlutterActivity()

View file

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="?android:colorBackground" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

View file

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown. (Before: 544 B)

Binary file not shown. (Before: 442 B)

Binary file not shown. (Before: 721 B)

Binary file not shown. (Before: 1 KiB)

Binary file not shown. (Before: 1.4 KiB)

View file

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View file

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
<style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View file

@@ -1,7 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View file

@@ -1,18 +0,0 @@
allprojects {
repositories {
google()
mavenCentral()
}
}
rootProject.buildDir = "../build"
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(":app")
}
tasks.register("clean", Delete) {
delete rootProject.buildDir
}

View file

@@ -1,3 +0,0 @@
org.gradle.jvmargs=-Xmx4G -XX:+HeapDumpOnOutOfMemoryError
android.useAndroidX=true
android.enableJetifier=true

View file

@@ -1,5 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.3-all.zip

View file

@@ -1,25 +0,0 @@
pluginManagement {
def flutterSdkPath = {
def properties = new Properties()
file("local.properties").withInputStream { properties.load(it) }
def flutterSdkPath = properties.getProperty("flutter.sdk")
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
return flutterSdkPath
}()
includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
plugins {
id "dev.flutter.flutter-plugin-loader" version "1.0.0"
id "com.android.application" version "7.3.0" apply false
id "org.jetbrains.kotlin.android" version "1.7.10" apply false
}
include ":app"

View file

@@ -1,34 +0,0 @@
**/dgph
*.mode1v3
*.mode2v3
*.moved-aside
*.pbxuser
*.perspectivev3
**/*sync/
.sconsign.dblite
.tags*
**/.vagrant/
**/DerivedData/
Icon?
**/Pods/
**/.symlinks/
profile
xcuserdata
**/.generated/
Flutter/App.framework
Flutter/Flutter.framework
Flutter/Flutter.podspec
Flutter/Generated.xcconfig
Flutter/ephemeral/
Flutter/app.flx
Flutter/app.zip
Flutter/flutter_assets/
Flutter/flutter_export_environment.sh
ServiceDefinitions.json
Runner/GeneratedPluginRegistrant.*
# Exceptions to above rules.
!default.mode1v3
!default.mode2v3
!default.pbxuser
!default.perspectivev3

View file

@@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>App</string>
<key>CFBundleIdentifier</key>
<string>io.flutter.flutter.app</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>App</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>MinimumOSVersion</key>
<string>12.0</string>
</dict>
</plist>

View file

@@ -1 +0,0 @@
#include "Generated.xcconfig"

View file

@@ -1 +0,0 @@
#include "Generated.xcconfig"

View file

@@ -1,616 +0,0 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 54;
objects = {
/* Begin PBXBuildFile section */
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C807B294A618700263BE5 /* RunnerTests.swift */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
331C8085294A63A400263BE5 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 97C146E61CF9000F007C117D /* Project object */;
proxyType = 1;
remoteGlobalIDString = 97C146ED1CF9000F007C117D;
remoteInfo = Runner;
};
/* End PBXContainerItemProxy section */
/* Begin PBXCopyFilesBuildPhase section */
9705A1C41CF9048500538489 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
331C807B294A618700263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = "<group>"; };
331C8081294A63A400263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
97C146EB1CF9000F007C117D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
331C8082294A63A400263BE5 /* RunnerTests */ = {
isa = PBXGroup;
children = (
331C807B294A618700263BE5 /* RunnerTests.swift */,
);
path = RunnerTests;
sourceTree = "<group>";
};
9740EEB11CF90186004384FC /* Flutter */ = {
isa = PBXGroup;
children = (
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
9740EEB21CF90195004384FC /* Debug.xcconfig */,
7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
9740EEB31CF90195004384FC /* Generated.xcconfig */,
);
name = Flutter;
sourceTree = "<group>";
};
97C146E51CF9000F007C117D = {
isa = PBXGroup;
children = (
9740EEB11CF90186004384FC /* Flutter */,
97C146F01CF9000F007C117D /* Runner */,
97C146EF1CF9000F007C117D /* Products */,
331C8082294A63A400263BE5 /* RunnerTests */,
);
sourceTree = "<group>";
};
97C146EF1CF9000F007C117D /* Products */ = {
isa = PBXGroup;
children = (
97C146EE1CF9000F007C117D /* Runner.app */,
331C8081294A63A400263BE5 /* RunnerTests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
97C146FA1CF9000F007C117D /* Main.storyboard */,
97C146FD1CF9000F007C117D /* Assets.xcassets */,
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
97C147021CF9000F007C117D /* Info.plist */,
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
);
path = Runner;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
331C8080294A63A400263BE5 /* RunnerTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */;
buildPhases = (
331C807D294A63A400263BE5 /* Sources */,
331C807F294A63A400263BE5 /* Resources */,
);
buildRules = (
);
dependencies = (
331C8086294A63A400263BE5 /* PBXTargetDependency */,
);
name = RunnerTests;
productName = RunnerTests;
productReference = 331C8081294A63A400263BE5 /* RunnerTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
97C146ED1CF9000F007C117D /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
9740EEB61CF901F6004384FC /* Run Script */,
97C146EA1CF9000F007C117D /* Sources */,
97C146EB1CF9000F007C117D /* Frameworks */,
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
);
buildRules = (
);
dependencies = (
);
name = Runner;
productName = Runner;
productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
97C146E61CF9000F007C117D /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = YES;
LastUpgradeCheck = 1510;
ORGANIZATIONNAME = "";
TargetAttributes = {
331C8080294A63A400263BE5 = {
CreatedOnToolsVersion = 14.0;
TestTargetID = 97C146ED1CF9000F007C117D;
};
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
LastSwiftMigration = 1100;
};
};
};
buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 97C146E51CF9000F007C117D;
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
97C146ED1CF9000F007C117D /* Runner */,
331C8080294A63A400263BE5 /* RunnerTests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
331C807F294A63A400263BE5 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${TARGET_BUILD_DIR}/${INFOPLIST_PATH}",
);
name = "Thin Binary";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Run Script";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
331C807D294A63A400263BE5 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
331C808B294A63AB00263BE5 /* RunnerTests.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
97C146EA1CF9000F007C117D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
331C8086294A63A400263BE5 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 97C146ED1CF9000F007C117D /* Runner */;
targetProxy = 331C8085294A63A400263BE5 /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin PBXVariantGroup section */
97C146FA1CF9000F007C117D /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C146FB1CF9000F007C117D /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C147001CF9000F007C117D /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
249021D3217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Profile;
};
249021D4217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Profile;
};
331C8088294A63A400263BE5 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama.RunnerTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
};
name = Debug;
};
331C8089294A63A400263BE5 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama.RunnerTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
};
name = Release;
};
331C808A294A63A400263BE5 /* Profile */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama.RunnerTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
};
name = Profile;
};
97C147031CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
97C147041CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
97C147061CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Debug;
};
97C147071CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.panorama;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
331C8088294A63A400263BE5 /* Debug */,
331C8089294A63A400263BE5 /* Release */,
331C808A294A63A400263BE5 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147031CF9000F007C117D /* Debug */,
97C147041CF9000F007C117D /* Release */,
249021D3217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147061CF9000F007C117D /* Debug */,
97C147071CF9000F007C117D /* Release */,
249021D4217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 97C146E61CF9000F007C117D /* Project object */;
}

View file

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View file

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

View file

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>

View file

@@ -1,98 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1510"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<Testables>
<TestableReference
skipped = "NO"
parallelizable = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "331C8080294A63A400263BE5"
BuildableName = "RunnerTests.xctest"
BlueprintName = "RunnerTests"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Profile"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View file

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
</Workspace>

View file

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

View file

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>

View file

@@ -1,13 +0,0 @@
import Flutter
import UIKit
@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
override func application(
_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
) -> Bool {
GeneratedPluginRegistrant.register(with: self)
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
}

View file

@@ -1,122 +0,0 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@3x.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@1x.png",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@1x.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@1x.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-App-83.5x83.5@2x.png",
"scale" : "2x"
},
{
"size" : "1024x1024",
"idiom" : "ios-marketing",
"filename" : "Icon-App-1024x1024@1x.png",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown. (Before: 11 KiB)

Binary file not shown. (Before: 295 B)

Binary file not shown. (Before: 406 B)

Binary file not shown. (Before: 450 B)

Binary file not shown. (Before: 282 B)

Binary file not shown. (Before: 462 B)

Binary file not shown. (Before: 704 B)

Binary file not shown. (Before: 406 B)

Binary file not shown. (Before: 586 B)

Binary file not shown. (Before: 862 B)

Binary file not shown. (Before: 862 B)

Binary file not shown. (Before: 1.6 KiB)

Binary file not shown. (Before: 762 B)

Binary file not shown. (Before: 1.2 KiB)

Some files were not shown because too many files have changed in this diff.