Skip to content

Commit

Permalink
[feature]: s9pk v2 (#2507)
Browse files Browse the repository at this point in the history
* feature: s9pk v2

wip

wip

wip

wip

refactor

* use WriteQueue

* fix proptest

* LoopDev

* eager directory hash verification
  • Loading branch information
dr-bonez authored Nov 10, 2023
1 parent 521014c commit fd96859
Show file tree
Hide file tree
Showing 28 changed files with 1,877 additions and 273 deletions.
25 changes: 25 additions & 0 deletions backend/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions backend/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ base32 = "0.4.0"
base64 = "0.21.4"
base64ct = "1.6.0"
basic-cookies = "0.1.4"
blake3 = "1.5.0"
bytes = "1"
chrono = { version = "0.4.31", features = ["serde"] }
clap = "3.2.25"
Expand Down Expand Up @@ -89,6 +90,7 @@ imbl-value = { git = "https://github.com/Start9Labs/imbl-value.git" }
include_dir = "0.7.3"
indexmap = { version = "2.0.2", features = ["serde"] }
indicatif = { version = "0.17.7", features = ["tokio"] }
integer-encoding = { version = "4.0.0", features = ["tokio_async"] }
ipnet = { version = "2.8.0", features = ["serde"] }
iprange = { version = "0.6.7", features = ["serde"] }
isocountry = "0.3.2"
Expand Down
89 changes: 89 additions & 0 deletions backend/src/disk/mount/filesystem/loop_dev.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
use std::os::unix::ffi::OsStrExt;
use std::path::Path;

use async_trait::async_trait;
use digest::generic_array::GenericArray;
use digest::{Digest, OutputSizeUser};
use serde::{Deserialize, Serialize};
use sha2::Sha256;

use super::{FileSystem, MountType, ReadOnly};
use crate::util::Invoke;
use crate::{Error, ResultExt};

/// Mounts a region of `logicalname` (starting at byte `offset`, at most `size`
/// bytes long) at `mountpoint` using a loopback device.
///
/// Creates `mountpoint` (and any missing parents) first, then shells out to
/// `mount -o loop,offset=…,sizelimit=…[,ro]`. Errors from the `mount` command
/// are surfaced as `ErrorKind::Filesystem`.
pub async fn mount(
    logicalname: impl AsRef<Path>,
    offset: u64,
    size: u64,
    mountpoint: impl AsRef<Path>,
    mount_type: MountType,
) -> Result<(), Error> {
    tokio::fs::create_dir_all(mountpoint.as_ref()).await?;

    // `offset`/`sizelimit` restrict the loop device to the target region;
    // append `ro` for read-only mounts.
    let options = if mount_type == ReadOnly {
        format!("loop,offset={offset},sizelimit={size},ro")
    } else {
        format!("loop,offset={offset},sizelimit={size}")
    };

    tokio::process::Command::new("mount")
        .arg(logicalname.as_ref())
        .arg(mountpoint.as_ref())
        .arg("-o")
        .arg(options)
        .invoke(crate::ErrorKind::Filesystem)
        .await?;
    Ok(())
}

/// A filesystem backed by a fixed byte region of a block device or file,
/// mounted via a loopback device (see the module-level [`mount`] helper).
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub struct LoopDev<LogicalName: AsRef<Path>> {
    // Path to the backing device or file.
    logicalname: LogicalName,
    // Byte offset into the backing file where the filesystem begins
    // (passed to `mount` as `offset`).
    offset: u64,
    // Maximum number of bytes the loop device may expose
    // (passed to `mount` as `sizelimit`).
    size: u64,
}
impl<LogicalName: AsRef<Path>> LoopDev<LogicalName> {
pub fn new(logicalname: LogicalName, offset: u64, size: u64) -> Self {
Self {
logicalname,
offset,
size,
}
}
}
#[async_trait]
impl<LogicalName: AsRef<Path> + Send + Sync> FileSystem for LoopDev<LogicalName> {
    /// Mounts this loop device at `mountpoint` by delegating to the
    /// module-level [`mount`] helper.
    async fn mount<P: AsRef<Path> + Send + Sync>(
        &self,
        mountpoint: P,
        mount_type: MountType,
    ) -> Result<(), Error> {
        mount(
            self.logicalname.as_ref(),
            self.offset,
            self.size,
            mountpoint,
            mount_type,
        )
        .await
    }

    /// Identity hash for this mount source: SHA-256 over a type tag, the
    /// canonicalized backing path, and the region's offset and size.
    /// Canonicalization resolves symlinks so two spellings of the same device
    /// hash identically; failure to canonicalize is reported as a
    /// `Filesystem` error annotated with the offending path.
    async fn source_hash(
        &self,
    ) -> Result<GenericArray<u8, <Sha256 as OutputSizeUser>::OutputSize>, Error> {
        let mut sha = Sha256::new();
        sha.update("LoopDev");
        sha.update(
            tokio::fs::canonicalize(self.logicalname.as_ref())
                .await
                .with_ctx(|_| {
                    (
                        crate::ErrorKind::Filesystem,
                        self.logicalname.as_ref().display().to_string(),
                    )
                })?
                .as_os_str()
                .as_bytes(),
        );
        sha.update(&u64::to_be_bytes(self.offset)[..]);
        // BUGFIX: also hash `size`. Two loop devices with the same backing
        // file and offset but different `sizelimit`s expose different
        // filesystems and must not collide to the same source hash.
        sha.update(&u64::to_be_bytes(self.size)[..]);
        Ok(sha.finalize())
    }
}
1 change: 1 addition & 0 deletions backend/src/disk/mount/filesystem/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ pub mod ecryptfs;
pub mod efivarfs;
pub mod httpdirfs;
pub mod label;
pub mod loop_dev;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MountType {
Expand Down
1 change: 1 addition & 0 deletions backend/src/prelude.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
pub use color_eyre::eyre::eyre;
pub use models::OptionExt;
pub use tracing::instrument;

pub use crate::db::prelude::*;
pub use crate::ensure_code;
Expand Down
199 changes: 199 additions & 0 deletions backend/src/s9pk/merkle_archive/directory_contents.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
use std::collections::BTreeMap;
use std::path::Path;

use futures::future::BoxFuture;
use futures::FutureExt;
use imbl_value::InternedString;
use tokio::io::AsyncRead;

use crate::prelude::*;
use crate::s9pk::merkle_archive::hash::{Hash, HashWriter};
use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter};
use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource, Section};
use crate::s9pk::merkle_archive::write_queue::WriteQueue;
use crate::s9pk::merkle_archive::{varint, Entry, EntryContents};

/// An in-memory directory tree of a merkle archive: entry names mapped to
/// their [`Entry`]s. A `BTreeMap` keeps the entries ordered by name, so
/// serialization (and therefore hashing) is deterministic.
#[derive(Debug)]
pub struct DirectoryContents<S>(BTreeMap<InternedString, Entry<S>>);
impl<S> DirectoryContents<S> {
pub fn new() -> Self {
Self(BTreeMap::new())
}

#[instrument(skip_all)]
pub fn get_path(&self, path: impl AsRef<Path>) -> Option<&Entry<S>> {
let mut dir = Some(self);
let mut res = None;
for segment in path.as_ref().into_iter() {
let segment = segment.to_str()?;
if segment == "/" {
continue;
}
res = dir?.get(segment);
if let Some(EntryContents::Directory(d)) = res.as_ref().map(|e| e.as_contents()) {
dir = Some(d);
} else {
dir = None
}
}
res
}

pub fn insert_path(&mut self, path: impl AsRef<Path>, entry: Entry<S>) -> Result<(), Error> {
let path = path.as_ref();
let (parent, Some(file)) = (path.parent(), path.file_name().and_then(|f| f.to_str()))
else {
return Err(Error::new(
eyre!("cannot create file at root"),
ErrorKind::Pack,
));
};
let mut dir = self;
for segment in parent.into_iter().flatten() {
let segment = segment
.to_str()
.ok_or_else(|| Error::new(eyre!("non-utf8 path segment"), ErrorKind::Utf8))?;
if segment == "/" {
continue;
}
if !dir.contains_key(segment) {
dir.insert(
segment.into(),
Entry::new(EntryContents::Directory(DirectoryContents::new())),
);
}
if let Some(EntryContents::Directory(d)) =
dir.get_mut(segment).map(|e| e.as_contents_mut())
{
dir = d;
} else {
return Err(Error::new(eyre!("failed to insert entry at path {path:?}: ancestor exists and is not a directory"), ErrorKind::Pack));
}
}
dir.insert(file.into(), entry);
Ok(())
}

pub const fn header_size() -> u64 {
8 // position: u64 BE
+ 8 // size: u64 BE
}

#[instrument(skip_all)]
pub async fn serialize_header<W: Sink>(&self, position: u64, w: &mut W) -> Result<u64, Error> {
use tokio::io::AsyncWriteExt;

let size = self.toc_size();

w.write_all(&position.to_be_bytes()).await?;
w.write_all(&size.to_be_bytes()).await?;

Ok(position)
}

pub fn toc_size(&self) -> u64 {
self.0.iter().fold(
varint::serialized_varint_size(self.0.len() as u64),
|acc, (name, entry)| {
acc + varint::serialized_varstring_size(&**name) + entry.header_size()
},
)
}
}
impl<S: ArchiveSource> DirectoryContents<Section<S>> {
    /// Deserializes a directory from its 16-byte `header` (TOC position then
    /// TOC size, both u64 BE), fetching the table of contents from `source`
    /// and eagerly verifying that the resulting directory's [`Self::sighash`]
    /// matches the expected `sighash` before returning it.
    ///
    /// Returns an `InvalidSignature` error on hash mismatch. Boxed because
    /// it recurses indirectly through `Entry::deserialize` for
    /// subdirectories.
    #[instrument(skip_all)]
    pub fn deserialize<'a>(
        source: &'a S,
        header: &'a mut (impl AsyncRead + Unpin + Send),
        sighash: Hash,
    ) -> BoxFuture<'a, Result<Self, Error>> {
        async move {
            use tokio::io::AsyncReadExt;

            // Header: where the TOC lives in the source, and how long it is.
            let mut position = [0u8; 8];
            header.read_exact(&mut position).await?;
            let position = u64::from_be_bytes(position);

            let mut size = [0u8; 8];
            header.read_exact(&mut size).await?;
            let size = u64::from_be_bytes(size);

            let mut toc_reader = source.fetch(position, size).await?;

            // TOC: varint count, then (varstring name, entry) pairs.
            let len = varint::deserialize_varint(&mut toc_reader).await?;
            let mut entries = BTreeMap::new();
            for _ in 0..len {
                entries.insert(
                    varint::deserialize_varstring(&mut toc_reader).await?.into(),
                    Entry::deserialize(source, &mut toc_reader).await?,
                );
            }

            let res = Self(entries);

            // Eager verification: reject the directory immediately if its
            // recomputed signature hash does not match the expected one.
            if res.sighash().await? == sighash {
                Ok(res)
            } else {
                Err(Error::new(
                    eyre!("hash sum does not match"),
                    ErrorKind::InvalidSignature,
                ))
            }
        }
        .boxed()
    }
}
impl<S: FileSource> DirectoryContents<S> {
#[instrument(skip_all)]
pub fn update_hashes<'a>(&'a mut self, only_missing: bool) -> BoxFuture<'a, Result<(), Error>> {
async move {
for (_, entry) in &mut self.0 {
entry.update_hash(only_missing).await?;
}
Ok(())
}
.boxed()
}

#[instrument(skip_all)]
pub fn sighash<'a>(&'a self) -> BoxFuture<'a, Result<Hash, Error>> {
async move {
let mut hasher = TrackingWriter::new(0, HashWriter::new());
let mut sig_contents = BTreeMap::new();
for (name, entry) in &self.0 {
sig_contents.insert(name.clone(), entry.to_missing().await?);
}
Self(sig_contents)
.serialize_toc(&mut WriteQueue::new(0), &mut hasher)
.await?;
Ok(hasher.into_inner().finalize())
}
.boxed()
}

#[instrument(skip_all)]
pub async fn serialize_toc<'a, W: Sink>(
&'a self,
queue: &mut WriteQueue<'a, S>,
w: &mut W,
) -> Result<(), Error> {
varint::serialize_varint(self.0.len() as u64, w).await?;
for (name, entry) in self.0.iter() {
varint::serialize_varstring(&**name, w).await?;
entry.serialize_header(queue.add(entry).await?, w).await?;
}

Ok(())
}
}
// Deliberate newtype delegation: expose the underlying name → entry map
// read-only, so callers can use `BTreeMap` methods (`get`, `iter`, …)
// directly on a `DirectoryContents`.
impl<S> std::ops::Deref for DirectoryContents<S> {
    type Target = BTreeMap<InternedString, Entry<S>>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Mutable counterpart of the `Deref` delegation: allows `insert`,
// `get_mut`, `entry`, etc. directly on a `DirectoryContents`.
impl<S> std::ops::DerefMut for DirectoryContents<S> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
Loading

0 comments on commit fd96859

Please sign in to comment.