Remove bincode dep, use serde_json for sqlite3 values

pull/223/head
Manos Pitsidianakis 2023-06-17 19:59:13 +03:00
parent fd0faade06
commit 4da5366959
Signed by: Manos Pitsidianakis
GPG Key ID: 7729C7707F7E09D0
10 changed files with 33 additions and 182 deletions
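For orientation: the switch this commit describes boils down to writing envelopes into the sqlite3 cache as JSON byte blobs (serde_json) instead of bincode blobs. A minimal round-trip sketch of that scheme, using a hypothetical CachedEnvelope stand-in rather than melib's actual Envelope type, and assuming rusqlite plus serde's derive feature:

    use rusqlite::Connection;
    use serde::{Deserialize, Serialize};

    // Hypothetical stand-in for melib's Envelope; the fields are illustrative only.
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct CachedEnvelope {
        subject: String,
        seen: bool,
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let conn = Connection::open_in_memory()?;
        conn.execute(
            "CREATE TABLE envelopes (hash INTEGER, envelope BLOB)",
            rusqlite::params![],
        )?;

        // Write: encode the value as a JSON byte blob (previously a bincode blob).
        let env = CachedEnvelope { subject: "hello".into(), seen: false };
        let blob: Vec<u8> = serde_json::to_vec(&env)?;
        conn.execute(
            "INSERT INTO envelopes VALUES (?1, ?2)",
            rusqlite::params![1_i64, blob],
        )?;

        // Read: fetch the blob back and decode it with serde_json.
        let raw: Vec<u8> = conn.query_row(
            "SELECT envelope FROM envelopes WHERE hash = ?1",
            rusqlite::params![1_i64],
            |row| row.get(0),
        )?;
        let decoded: CachedEnvelope = serde_json::from_slice(&raw)?;
        assert_eq!(decoded, env);
        Ok(())
    }

One practical consequence, visible in the hunks below, is that a decode failure on an old blob is easy to detect and can be treated as a stale cache rather than silently misread data.
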

Cargo.lock generated

@ -200,15 +200,6 @@ version = "0.21.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
-[[package]]
-name = "bincode"
-version = "1.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
-dependencies = [
-"serde",
-]
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@ -1167,7 +1158,6 @@ name = "meli"
version = "0.7.2"
dependencies = [
"async-task",
"bincode",
"bitflags",
"crossbeam",
"flate2",
@ -1206,7 +1196,6 @@ version = "0.7.2"
 dependencies = [
 "async-stream",
 "base64 0.13.0",
-"bincode",
 "bitflags",
 "data-encoding",
 "encoding",


@ -29,7 +29,6 @@ required-features = ["melib/imap_backend"]
 [dependencies]
 async-task = "^4.2.0"
-bincode = { version = "^1.3.0", default-features = false }
 bitflags = "1.0"
 crossbeam = { version = "^0.8" }
 flate2 = { version = "1.0.16", optional = true }


@ -22,7 +22,6 @@ path = "src/lib.rs"
 [dependencies]
 async-stream = "^0.3"
 base64 = { version = "^0.13", optional = true }
-bincode = { version = "^1.3.0", default-features = false }
 bitflags = "1.0"
 data-encoding = { version = "2.1.1" }
 encoding = { version = "0.2.33", default-features = false }
@ -44,7 +43,7 @@ regex = { version = "1" }
 rusqlite = { version = "^0.28", default-features = false, optional = true }
 serde = { version = "1.0.71", features = ["rc", ] }
 serde_derive = "1.0.71"
-serde_json = { version = "1.0", optional = true, features = ["raw_value",] }
+serde_json = { version = "1.0", features = ["raw_value",] }
 smallvec = { version = "^1.5.0", features = ["serde", ] }
 smol = "1.0.0"
@ -81,7 +80,7 @@ gpgme = []
http = ["isahc"]
http-static = ["isahc", "isahc/static-curl"]
imap_backend = ["imap-codec", "tls"]
jmap_backend = ["http", "serde_json"]
jmap_backend = ["http"]
maildir_backend = ["notify"]
mbox_backend = ["notify"]
notmuch_backend = []


@ -148,7 +148,7 @@ mod sqlite3_m {
 CREATE INDEX IF NOT EXISTS envelope_idx ON envelopes(hash);
 CREATE INDEX IF NOT EXISTS mailbox_idx ON mailbox(mailbox_hash);",
 ),
-version: 2,
+version: 3,
 };
 impl ToSql for ModSequence {
@ -405,19 +405,30 @@ mod sqlite3_m {
 return Ok(None);
 }
-let mut stmt = self.connection.prepare(
-"SELECT uid, envelope, modsequence FROM envelopes WHERE mailbox_hash = ?1;",
-)?;
+let ret: Vec<(UID, Envelope, Option<ModSequence>)> = match {
+let mut stmt = self.connection.prepare(
+"SELECT uid, envelope, modsequence FROM envelopes WHERE mailbox_hash = ?1;",
+)?;
-let ret: Vec<(UID, Envelope, Option<ModSequence>)> = stmt
-.query_map(sqlite3::params![mailbox_hash], |row| {
-Ok((
-row.get(0).map(|i: Sqlite3UID| i as UID)?,
-row.get(1)?,
-row.get(2)?,
-))
-})?
-.collect::<std::result::Result<_, _>>()?;
+let x = stmt
+.query_map(sqlite3::params![mailbox_hash], |row| {
+Ok((
+row.get(0).map(|i: Sqlite3UID| i as UID)?,
+row.get(1)?,
+row.get(2)?,
+))
+})?
+.collect::<std::result::Result<_, _>>();
+x
+} {
+Err(err) if matches!(&err, rusqlite::Error::FromSqlConversionFailure(_, _, _)) => {
+drop(err);
+self.reset()?;
+return Ok(None);
+}
+Err(err) => return Err(err.into()),
+Ok(v) => v,
+};
 let mut max_uid = 0;
 let mut env_lck = self.uid_store.envelopes.lock().unwrap();
 let mut hash_index_lck = self.uid_store.hash_index.lock().unwrap();

@ -211,29 +211,17 @@ impl MailBackend for MaildirType {
 let unseen = mailbox.unseen.clone();
 let total = mailbox.total.clone();
 let path: PathBuf = mailbox.fs_path().into();
-let root_mailbox = self.path.to_path_buf();
 let map = self.hash_indexes.clone();
 let mailbox_index = self.mailbox_index.clone();
-super::stream::MaildirStream::new(
-&self.name,
-mailbox_hash,
-unseen,
-total,
-path,
-root_mailbox,
-map,
-mailbox_index,
-)
+super::stream::MaildirStream::new(mailbox_hash, unseen, total, path, map, mailbox_index)
 }
 fn refresh(&mut self, mailbox_hash: MailboxHash) -> ResultFuture<()> {
-let cache_dir = xdg::BaseDirectories::with_profile("meli", &self.name).unwrap();
 let account_hash = AccountHash::from_bytes(self.name.as_bytes());
 let sender = self.event_consumer.clone();
 let mailbox: &MaildirMailbox = &self.mailboxes[&mailbox_hash];
 let path: PathBuf = mailbox.fs_path().into();
-let root_mailbox = self.path.to_path_buf();
 let map = self.hash_indexes.clone();
 let mailbox_index = self.mailbox_index.clone();
@ -268,23 +256,6 @@ impl MailBackend for MaildirType {
 .lock()
 .unwrap()
 .insert(env.hash(), mailbox_hash);
-let file_name = file.strip_prefix(&root_mailbox).unwrap().to_path_buf();
-if let Ok(cached) = cache_dir.place_cache_file(file_name) {
-/* place result in cache directory */
-let f = fs::File::create(cached)?;
-let metadata = f.metadata()?;
-let mut permissions = metadata.permissions();
-permissions.set_mode(0o600); // Read/write for owner only.
-f.set_permissions(permissions)?;
-let writer = io::BufWriter::new(f);
-bincode::Options::serialize_into(
-bincode::config::DefaultOptions::new(),
-writer,
-&env,
-)?;
-}
 (sender)(
 account_hash,
 BackendEvent::Refresh(RefreshEvent {
@ -336,7 +307,6 @@ impl MailBackend for MaildirType {
 watcher
 .watch(&root_mailbox, RecursiveMode::Recursive)
 .unwrap();
-let cache_dir = xdg::BaseDirectories::with_profile("meli", &self.name).unwrap();
 debug!("watching {:?}", root_mailbox);
 let hash_indexes = self.hash_indexes.clone();
 let mailbox_index = self.mailbox_index.clone();
@ -392,7 +362,6 @@ impl MailBackend for MaildirType {
 &hash_indexes,
 mailbox_hash,
 pathbuf.as_path(),
-&cache_dir,
 file_name,
 &mut buf,
 ) {
@ -447,7 +416,6 @@ impl MailBackend for MaildirType {
 &hash_indexes,
 mailbox_hash,
 pathbuf.as_path(),
-&cache_dir,
 file_name,
 &mut buf,
 ) {
@ -599,7 +567,6 @@ impl MailBackend for MaildirType {
 &hash_indexes,
 dest_mailbox,
 dest.as_path(),
-&cache_dir,
 file_name,
 &mut buf,
 ) {
@ -693,7 +660,6 @@ impl MailBackend for MaildirType {
 &hash_indexes,
 dest_mailbox.unwrap_or(mailbox_hash),
 dest.as_path(),
-&cache_dir,
 file_name,
 &mut buf,
 ) {
@ -740,7 +706,6 @@ impl MailBackend for MaildirType {
 &hash_indexes,
 dest_mailbox,
 dest.as_path(),
-&cache_dir,
 file_name,
 &mut buf,
 ) {
@ -1376,7 +1341,6 @@ fn add_path_to_index(
 hash_index: &HashIndexes,
 mailbox_hash: MailboxHash,
 path: &Path,
-cache_dir: &xdg::BaseDirectories,
 file_name: PathBuf,
 buf: &mut Vec<u8>,
 ) -> Result<Envelope> {
@ -1403,17 +1367,5 @@ fn add_path_to_index(
 env_hash,
 file_name.display()
 );
-if let Ok(cached) = cache_dir.place_cache_file(file_name) {
-debug!("putting in cache");
-/* place result in cache directory */
-let f = fs::File::create(cached)?;
-let metadata = f.metadata()?;
-let mut permissions = metadata.permissions();
-permissions.set_mode(0o600); // Read/write for owner only.
-f.set_permissions(permissions)?;
-let writer = io::BufWriter::new(f);
-bincode::Options::serialize_into(bincode::config::DefaultOptions::new(), writer, &env)?;
-}
 Ok(env)
 }


@ -22,9 +22,7 @@
 use core::{future::Future, pin::Pin};
 use std::{
 io::{self, Read},
-os::unix::fs::PermissionsExt,
 path::PathBuf,
-result,
 sync::{Arc, Mutex},
 };
@ -46,12 +44,10 @@ pub struct MaildirStream {
 impl MaildirStream {
 pub fn new(
-name: &str,
 mailbox_hash: MailboxHash,
 unseen: Arc<Mutex<usize>>,
 total: Arc<Mutex<usize>>,
 mut path: PathBuf,
-root_mailbox: PathBuf,
 map: HashIndexes,
 mailbox_index: Arc<Mutex<HashMap<EnvelopeHash, MailboxHash>>>,
 ) -> Result<Pin<Box<dyn Stream<Item = Result<Vec<Envelope>>> + Send + 'static>>> {
@ -71,14 +67,11 @@ impl MaildirStream {
 files
 .chunks(chunk_size)
 .map(|chunk| {
-let cache_dir = xdg::BaseDirectories::with_profile("meli", name).unwrap();
 Box::pin(Self::chunk(
 SmallVec::from(chunk),
-cache_dir,
 mailbox_hash,
 unseen.clone(),
 total.clone(),
-root_mailbox.clone(),
 map.clone(),
 mailbox_index.clone(),
 )) as Pin<Box<dyn Future<Output = _> + Send + 'static>>
@ -92,11 +85,9 @@ impl MaildirStream {
 async fn chunk(
 chunk: SmallVec<[std::path::PathBuf; 2048]>,
-cache_dir: xdg::BaseDirectories,
 mailbox_hash: MailboxHash,
 unseen: Arc<Mutex<usize>>,
 total: Arc<Mutex<usize>>,
-root_mailbox: PathBuf,
 map: HashIndexes,
 mailbox_index: Arc<Mutex<HashMap<EnvelopeHash, MailboxHash>>>,
 ) -> Result<Vec<Envelope>> {
@ -104,39 +95,6 @@ impl MaildirStream {
 let mut unseen_total: usize = 0;
 let mut buf = Vec::with_capacity(4096);
 for file in chunk {
-/* Check if we have a cache file with this email's
-* filename */
-let file_name = PathBuf::from(&file)
-.strip_prefix(&root_mailbox)
-.unwrap()
-.to_path_buf();
-if let Some(cached) = cache_dir.find_cache_file(&file_name) {
-/* Cached struct exists, try to load it */
-let cached_file = fs::File::open(&cached)?;
-let filesize = cached_file.metadata()?.len();
-let reader = io::BufReader::new(cached_file);
-let result: result::Result<Envelope, _> = bincode::Options::deserialize_from(
-bincode::Options::with_limit(
-bincode::config::DefaultOptions::new(),
-2 * filesize,
-),
-reader,
-);
-if let Ok(env) = result {
-let mut map = map.lock().unwrap();
-let map = map.entry(mailbox_hash).or_default();
-let hash = env.hash();
-map.insert(hash, file.clone().into());
-mailbox_index.lock().unwrap().insert(hash, mailbox_hash);
-if !env.is_seen() {
-unseen_total += 1;
-}
-local_r.push(env);
-continue;
-}
-/* Try delete invalid file */
-let _ = fs::remove_file(&cached);
-};
 let env_hash = get_file_hash(&file);
 {
 let mut map = map.lock().unwrap();
@ -150,22 +108,6 @@ impl MaildirStream {
 Ok(mut env) => {
 env.set_hash(env_hash);
 mailbox_index.lock().unwrap().insert(env_hash, mailbox_hash);
-if let Ok(cached) = cache_dir.place_cache_file(file_name) {
-/* place result in cache directory */
-let f = fs::File::create(cached)?;
-let metadata = f.metadata()?;
-let mut permissions = metadata.permissions();
-permissions.set_mode(0o600); // Read/write for owner only.
-f.set_permissions(permissions)?;
-let writer = io::BufWriter::new(f);
-bincode::Options::serialize_into(
-bincode::config::DefaultOptions::new(),
-writer,
-&env,
-)?;
-}
 if !env.is_seen() {
 unseen_total += 1;
 }


@ -49,30 +49,6 @@ impl Default for Collection {
 }
 }
-/*
-impl Drop for Collection {
-fn drop(&mut self) {
-let cache_dir: xdg::BaseDirectories =
-xdg::BaseDirectories::with_profile("meli", "threads".to_string()).unwrap();
-if let Ok(cached) = cache_dir.place_cache_file("threads") {
-/* place result in cache directory */
-let f = match fs::File::create(cached) {
-Ok(f) => f,
-Err(e) => {
-panic!("{}", e);
-}
-};
-let writer = io::BufWriter::new(f);
-let _ = bincode::Options::serialize_into(
-bincode::config::DefaultOptions::new(),
-writer,
-&self.thread,
-);
-}
-}
-}
-*/
 impl Collection {
 pub fn new() -> Collection {
 let message_id_index = Arc::new(RwLock::new(HashMap::with_capacity_and_hasher(


@ -637,13 +637,6 @@ impl From<std::ffi::NulError> for Error {
 }
 }
-impl From<Box<bincode::ErrorKind>> for Error {
-#[inline]
-fn from(kind: Box<bincode::ErrorKind>) -> Error {
-Error::new(kind.to_string()).set_source(Some(Arc::new(kind)))
-}
-}
 impl From<nix::Error> for Error {
 #[inline]
 fn from(kind: nix::Error) -> Error {


@ -81,7 +81,8 @@ pub fn open_or_create_db(
 let version: i32 = conn.pragma_query_value(None, "user_version", |row| row.get(0))?;
 if version != 0_i32 && version as u32 != description.version {
 log::info!(
-"Database version mismatch, is {} but expected {}",
+"Database version mismatch, is {} but expected {}. Attempting to recreate \
+database.",
 version,
 description.version
 );
@ -130,27 +131,17 @@ pub fn reset_db(description: &DatabaseDescription, identifier: Option<&str>) ->
 impl ToSql for Envelope {
 fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
-let v: Vec<u8> = bincode::Options::serialize(bincode::config::DefaultOptions::new(), self)
-.map_err(|e| {
-rusqlite::Error::ToSqlConversionFailure(Box::new(Error::new(e.to_string())))
-})?;
+let v: Vec<u8> = serde_json::to_vec(self).map_err(|e| {
+rusqlite::Error::ToSqlConversionFailure(Box::new(Error::new(e.to_string())))
+})?;
 Ok(ToSqlOutput::from(v))
 }
 }
 impl FromSql for Envelope {
 fn column_result(value: rusqlite::types::ValueRef) -> FromSqlResult<Self> {
-use std::convert::TryFrom;
 let b: Vec<u8> = FromSql::column_result(value)?;
-bincode::Options::deserialize(
-bincode::Options::with_limit(
-bincode::config::DefaultOptions::new(),
-2 * u64::try_from(b.len()).map_err(|e| FromSqlError::Other(Box::new(e)))?,
-),
-&b,
-)
-.map_err(|e| FromSqlError::Other(Box::new(e)))
+serde_json::from_slice(&b).map_err(|e| FromSqlError::Other(Box::new(e)))
 }
 }
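
The version check above relies on sqlite's user_version pragma: the version the database reports is compared against the expected schema version, and on mismatch the database is recreated, which is how caches still holding bincode-encoded rows get migrated. A minimal sketch of that gate, with a hypothetical helper name (the real code does this inline in open_or_create_db):

    // Compare PRAGMA user_version with the expected schema version; a fresh
    // database reports 0 and is initialized rather than rebuilt, anything else
    // that disagrees triggers a rebuild.
    fn needs_rebuild(conn: &rusqlite::Connection, expected: u32) -> rusqlite::Result<bool> {
        let version: i32 = conn.pragma_query_value(None, "user_version", |row| row.get(0))?;
        Ok(version != 0 && version as u32 != expected)
    }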


@ -21,7 +21,6 @@
 /*! Configuration logic and `config.toml` interfaces. */
-extern crate bincode;
 extern crate serde;
 extern crate toml;
 extern crate xdg;