downloading mostly works for larger batches of email now

Michael Zhang 2021-03-25 15:23:39 -05:00
parent dfe6ebb596
commit 6b82137124
Signed by: michael
GPG key ID: BDA47A31A3C8EE6B
5 changed files with 27 additions and 11 deletions

View file

@@ -36,10 +36,11 @@ pub fn parse_capability(s: impl AsRef<str>) -> ParseResult<Capability> {
 pub fn parse_streamed_response(s: impl AsRef<str>) -> ParseResult<(Response, usize)> {
     let s = s.as_ref();
+    let len = s.len();
     let mut pairs = match Rfc3501::parse(Rule::streamed_response, s) {
         Ok(v) => v,
         Err(e) => {
-            // error!("stream failed: {}", e);
+            // error!("stream failed with len {}: {}", len ,e);
             return Err(e);
         }
     };
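A rough caller-side sketch of how the (Response, usize) pair can be consumed. This is an assumption, not confirmed by the diff: the usize is taken to be the number of bytes parsed off the front of the buffer, and drain_buffer is an illustrative helper.

    // Hedged sketch: `consumed` is assumed to be the byte count parsed off the
    // front of the buffer; `drain_buffer` is not part of this commit.
    fn drain_buffer(buffer: &mut String) -> Vec<Response> {
        let mut responses = Vec::new();
        // keep pulling complete responses off the front; the first parse error
        // may simply mean the tail of the buffer is not a full response yet
        while let Ok((resp, consumed)) = parse_streamed_response(buffer.as_str()) {
            buffer.drain(..consumed);
            responses.push(resp);
        }
        responses
    }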

View file

@@ -44,7 +44,7 @@ date_month = { "Jan" | "Feb" | "Mar" | "Apr" | "May" | "Jun" | "Jul" | "Aug" | "
 date_time = { dquote_ ~ date_day_fixed ~ "-" ~ date_month ~ "-" ~ date_year ~ sp ~ time ~ sp ~ zone ~ dquote_ }
 date_year = @{ digit{4} }
 digit_nz = @{ '\x31'..'\x39' }
-env_address1 = { "(" ~ address ~ (sp? ~ address)? ~ ")" }
+env_address1 = { "(" ~ address ~ (sp? ~ address)* ~ ")" }
 env_bcc = { env_address1 | nil }
 env_cc = { env_address1 | nil }
 env_date = { nstring }
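This grammar change is what matters for envelopes with several recipients: with `?`, env_address1 accepted at most two addresses inside the parentheses; with `*` it accepts any number. A hedged test sketch follows. Rule::env_address1 is assumed to be generated by pest from the rule above, and the address literals assume the RFC 3501 addr-name/addr-adl/addr-mailbox/addr-host shape.

    #[cfg(test)]
    mod env_address1_tests {
        use super::{Rfc3501, Rule};
        use pest::Parser;

        #[test]
        fn accepts_more_than_two_addresses() {
            // three parenthesized addresses; the old `(sp? ~ address)?` form
            // could not parse past the second one
            let input = r#"(("Ann" NIL "ann" "example.com")("Bob" NIL "bob" "example.com")("Cat" NIL "cat" "example.com"))"#;
            assert!(Rfc3501::parse(Rule::env_address1, input).is_ok());
        }
    }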

View file

@@ -6,10 +6,12 @@ CREATE TABLE IF NOT EXISTS "accounts" (
 CREATE TABLE IF NOT EXISTS "mail" (
     "id" INTEGER PRIMARY KEY,
+    "internaldate" TEXT,
     "message_id" TEXT,
     "account" TEXT,
     "folder" TEXT,
     "uidvalidity" INTEGER,
+    "subject" TEXT,
     "uid" INTEGER,
     "filename" TEXT
 );
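For context, a hedged read-path sketch using the two new columns. The query and the function signature are illustrative only and not part of this migration; column and table names come from the schema above.

    // Illustrative only: fetch the newest messages for one folder using the
    // new "subject" and "internaldate" columns.
    async fn newest_mail(
        pool: &sqlx::SqlitePool,
        account: &str,
        folder: &str,
    ) -> anyhow::Result<Vec<(Option<String>, Option<String>)>> {
        let recent = sqlx::query_as(
            r#"
            SELECT "subject", "internaldate" FROM "mail"
            WHERE "account" = ? AND "folder" = ?
            ORDER BY "internaldate" DESC
            LIMIT 20
            "#,
        )
        .bind(account)
        .bind(folder)
        .fetch_all(pool)
        .await?;
        Ok(recent)
    }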

View file

@@ -75,7 +75,6 @@ pub async fn sync_main(
     debug!("select response: {:?}", select);
     if let (Some(exists), Some(uidvalidity)) = (select.exists, select.uid_validity) {
-        if exists < 10 {
         let new_uids = stream::iter(1..exists).map(Ok).try_filter_map(|uid| {
             mail_store.try_identify_email(&acct_name, &folder, uid, uidvalidity, None)
                 // invert the option to only select uids that haven't been downloaded
@@ -83,6 +82,7 @@ pub async fn sync_main(
                 .map_err(|err| err.context("error checking if the email is already downloaded [try_identify_email]"))
         }).try_collect::<Vec<_>>().await?;
+        if !new_uids.is_empty() {
             debug!("fetching uids {:?}", new_uids);
             let fetched = authed
                 .uid_fetch(&new_uids, FetchItems::PanoramaAll)
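The removed `if exists < 10` guard is what previously capped syncing to tiny mailboxes; the only remaining gate is whether any UIDs are actually missing. Below is a minimal, self-contained sketch of the try_filter_map / "invert the option" pattern used above, with a stub standing in for MailStore::try_identify_email.

    use futures::stream::{self, StreamExt, TryStreamExt};

    // Stub standing in for try_identify_email: Some(..) means the uid is
    // already stored locally, None means it still needs to be downloaded.
    async fn already_downloaded(uid: u32) -> anyhow::Result<Option<u32>> {
        Ok(if uid <= 2 { Some(uid) } else { None })
    }

    async fn collect_new_uids(exists: u32) -> anyhow::Result<Vec<u32>> {
        stream::iter(1..exists)
            .map(Ok)
            .try_filter_map(|uid| async move {
                // invert: keep only the uids the local store did NOT find
                Ok(already_downloaded(uid).await?.map_or(Some(uid), |_| None))
            })
            .try_collect()
            .await
    }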

View file

@@ -127,9 +127,12 @@ impl MailStore {
         attrs: Vec<AttributeValue>,
     ) -> Result<()> {
         let mut body = None;
+        let mut internaldate = None;
         for attr in attrs {
-            if let AttributeValue::BodySection(body_attr) = attr {
-                body = body_attr.data;
+            match attr {
+                AttributeValue::BodySection(body_attr) => body = body_attr.data,
+                AttributeValue::InternalDate(date) => internaldate = Some(date),
+                _ => {}
             }
         }
@@ -137,6 +140,10 @@ impl MailStore {
             Some(v) => v,
             None => return Ok(()),
         };
+        let internaldate = match internaldate {
+            Some(v) => v,
+            None => return Ok(()),
+        };

         let mut hasher = Sha256::new();
         hasher.update(body.as_bytes());
@@ -149,14 +156,16 @@ impl MailStore {
         // parse email
         let mut message_id = None;
+        let mut subject = None;
         let mail = mailparse::parse_mail(body.as_bytes())
             .with_context(|| format!("error parsing email with uid {}", uid))?;
         for header in mail.headers.iter() {
             let key = header.get_key_ref();
-            let key = key.to_ascii_lowercase();
             let value = header.get_value();
-            if key == "message-id" {
-                message_id = Some(value);
+            match key.to_ascii_lowercase().as_str() {
+                "message-id" => message_id = Some(value),
+                "subject" => subject = Some(value),
+                _ => {}
             }
         }
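For reference, a standalone version of the header scan, calling mailparse directly. The extract_basic_headers helper is illustrative, not code from this commit.

    use mailparse::parse_mail;

    // Header names are case-insensitive, hence the to_ascii_lowercase()
    // before matching on "message-id" and "subject".
    fn extract_basic_headers(raw: &[u8]) -> anyhow::Result<(Option<String>, Option<String>)> {
        let mail = parse_mail(raw)?;
        let mut message_id = None;
        let mut subject = None;
        for header in mail.headers.iter() {
            let value = header.get_value();
            match header.get_key_ref().to_ascii_lowercase().as_str() {
                "message-id" => message_id = Some(value),
                "subject" => subject = Some(value),
                _ => {}
            }
        }
        Ok((message_id, subject))
    }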
@@ -181,16 +190,20 @@ impl MailStore {
         if existing.is_none() {
             let id = sqlx::query(
                 r#"
-                INSERT INTO "mail" (account, message_id, folder, uid, uidvalidity, filename)
-                VALUES (?, ?, ?, ?, ?, ?)
+                INSERT INTO "mail" (
+                    account, subject, message_id, folder, uid, uidvalidity,
+                    filename, internaldate
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                 "#,
             )
             .bind(acct.as_ref())
+            .bind(subject)
             .bind(message_id)
             .bind(folder.as_ref())
             .bind(uid)
             .bind(uidvalidity)
             .bind(filename)
+            .bind(internaldate.to_rfc3339())
             .execute(&self.pool)
             .await
             .context("error inserting email into db")?
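One note on the `.bind(internaldate.to_rfc3339())` line: the "internaldate" column is TEXT, and RFC 3339 timestamps sort lexicographically in chronological order as long as they share a fixed offset, so ordering by that column stays meaningful. A tiny check, assuming chrono (suggested by the to_rfc3339() call but not shown in the diff):

    use chrono::{TimeZone, Utc};

    // Strings from to_rfc3339() with the same offset compare the same way the
    // underlying instants do; mixing offsets would break this property.
    #[test]
    fn rfc3339_sorts_chronologically() {
        let earlier = Utc.ymd(2021, 3, 24).and_hms(12, 0, 0);
        let later = Utc.ymd(2021, 3, 25).and_hms(15, 23, 39);
        assert!(earlier.to_rfc3339() < later.to_rfc3339());
    }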