rustfmt all the things
commit 6bf0d2cdd9
parent a9a54f2516
48 changed files with 1896 additions and 915 deletions
@@ -1,7 +1,7 @@
use clap::{Parser, Subcommand};
use humantime::Duration;
use ufh::item::ItemRef;
use std::path::PathBuf;
use ufh::item::ItemRef;

#[derive(Debug, Parser)]
#[command(name = "ufh", version = env!("CARGO_PKG_VERSION"), about = "cli to interact with a ufh server")]
@@ -55,14 +55,10 @@ pub enum ShareAction {
        item_ref: ItemRef,
    },
    /// get info about a share
    Info {
        share: String,
    },
    Info { share: String },
    /// delete a share
    #[command(name = "rm")]
    Remove {
        share: String,
    },
    Remove { share: String },
}

#[derive(Debug, Subcommand)]
@@ -104,7 +100,11 @@ pub enum EventAction {
    },
    /// get info about a file
    Info {
        #[arg(short, long, help = "what relations to fetch (in the form reltype/event.type")]
        #[arg(
            short,
            long,
            help = "what relations to fetch (in the form reltype/event.type"
        )]
        rels: Vec<String>,
        #[arg(name = "ref")]
        item_ref: ItemRef,
@@ -174,6 +174,6 @@ pub enum FileAction {
        #[arg(short, long, help = "whether to run in backgorund")]
        daemon: bool,
        #[arg(help = "where to mount")]
        path: PathBuf,
        path: PathBuf,
    },
}
@@ -3,17 +3,29 @@
// TODO (future): proper fuse support
// have by-ref/, by-name/, by-tag/ directories

use std::{path::PathBuf, time::{Duration, SystemTime}, collections::HashSet, sync::Arc, ffi::c_int, num::NonZeroUsize};
use fuser::{FileAttr, BackgroundSession};
use ufh::{event::{EventContent, Event}, actor::ActorSecret, query::Query, item::ItemRef};
use crate::net::Item;
use tokio::{runtime::Handle, sync::Mutex};
use once_cell::sync::OnceCell;
use fuser::{BackgroundSession, FileAttr};
use lru::LruCache;
use once_cell::sync::OnceCell;
use std::{
    collections::HashSet,
    ffi::c_int,
    num::NonZeroUsize,
    path::PathBuf,
    sync::Arc,
    time::{Duration, SystemTime},
};
use tokio::{runtime::Handle, sync::Mutex};
use ufh::{
    actor::ActorSecret,
    event::{Event, EventContent},
    item::ItemRef,
    query::Query,
};

use crate::net::Client;
mod tree;
use tree::{Graph, GraphItem, DirKind};
use tree::{DirKind, Graph, GraphItem};

use self::tree::GraphEntry;
@@ -36,17 +48,17 @@ impl Filesystem {
            poller: None,
        }
    }

    pub fn _mount(self, mountpoint: &PathBuf) -> Result<(), std::io::Error> {
        fuser::mount2(self, mountpoint, &[])?;
        Ok(())
    }

    pub fn spawn_mount(self, mountpoint: &PathBuf) -> Result<BackgroundSession, std::io::Error> {
        use fuser::MountOption;
        fuser::spawn_mount2(self, mountpoint, &[MountOption::RO, MountOption::NoAtime])
    }

    fn get_attr(&self, ino: u64, uid: u32, gid: u32) -> Option<FileAttr> {
        let attr = FileAttr {
            ino,
@@ -63,22 +75,21 @@ impl Filesystem {
            gid,
            rdev: 0,
            blksize: 1024 * 1024,
            flags: 0
            flags: 0,
        };

        let inodes = self.inodes.blocking_lock();

        match &inodes.get(ino).unwrap().item {
            GraphItem::Directory { .. } => {
                Some(FileAttr {
                    kind: fuser::FileType::Directory,
                    perm: 0o550,
                    ..attr
                })
            },
            GraphItem::Text { content, .. } => {
                Some(FileAttr { size: content.len() as u64, ..attr })
            },
            GraphItem::Directory { .. } => Some(FileAttr {
                kind: fuser::FileType::Directory,
                perm: 0o550,
                ..attr
            }),
            GraphItem::Text { content, .. } => Some(FileAttr {
                size: content.len() as u64,
                ..attr
            }),
            GraphItem::Event(event) => {
                let EventContent::File(file) = &event.content else {
                    panic!("validated at input");
@@ -91,14 +102,18 @@ impl Filesystem {
                    ctime: to_time(event.origin_ts),
                    crtime: to_time(event.origin_ts),
                    ..attr
                })
            },
                })
            }
        }
    }
}

impl fuser::Filesystem for Filesystem {
    fn init(&mut self, _req: &fuser::Request<'_>, _config: &mut fuser::KernelConfig) -> Result<(), c_int> {
    fn init(
        &mut self,
        _req: &fuser::Request<'_>,
        _config: &mut fuser::KernelConfig,
    ) -> Result<(), c_int> {
        let query = Query {
            refs: None,
            senders: Some(HashSet::from([self.key.get_id()])),
@@ -109,24 +124,54 @@ impl fuser::Filesystem for Filesystem {
        };

        let mut inodes = self.inodes.blocking_lock();

        let dir_root = 1;
        let dir_all_ref = inodes.create(GraphItem::new_dir("by-ref", DirKind::Default), vec![dir_root]);
        let dir_all_name = inodes.create(GraphItem::new_dir("by-name", DirKind::ShowNames), vec![dir_root]);
        let dir_tag_ref = inodes.create(GraphItem::new_dir("by-tag", DirKind::Default), vec![dir_root]);
        let dir_tag_name = inodes.create(GraphItem::new_dir("by-tag-name", DirKind::ShowNames), vec![dir_root]);
        let dir_upload = inodes.create(GraphItem::new_dir("upload", DirKind::Default), vec![dir_root]);
        inodes.create(GraphItem::Text { name: "README".into(), content: "currently a work in progress and read only".into() }, vec![dir_root]);
        inodes.create(GraphItem::Text { name: "TODO".into(), content: "currently unimplemented!".into() }, vec![dir_upload]);
        let dir_all_ref = inodes.create(
            GraphItem::new_dir("by-ref", DirKind::Default),
            vec![dir_root],
        );
        let dir_all_name = inodes.create(
            GraphItem::new_dir("by-name", DirKind::ShowNames),
            vec![dir_root],
        );
        let dir_tag_ref = inodes.create(
            GraphItem::new_dir("by-tag", DirKind::Default),
            vec![dir_root],
        );
        let dir_tag_name = inodes.create(
            GraphItem::new_dir("by-tag-name", DirKind::ShowNames),
            vec![dir_root],
        );
        let dir_upload = inodes.create(
            GraphItem::new_dir("upload", DirKind::Default),
            vec![dir_root],
        );
        inodes.create(
            GraphItem::Text {
                name: "README".into(),
                content: "currently a work in progress and read only".into(),
            },
            vec![dir_root],
        );
        inodes.create(
            GraphItem::Text {
                name: "TODO".into(),
                content: "currently unimplemented!".into(),
            },
            vec![dir_upload],
        );

        let client = self.client.clone();
        let inodes = self.inodes.clone();

        let handle = self.rt.spawn(async move {
            let query = client.query(&query).await.unwrap();
            let mut after: Option<String> = None;
            loop {
                let items = client.list(&query, None, after.clone(), Some(30000)).await.unwrap();
                let items = client
                    .list(&query, None, after.clone(), Some(30000))
                    .await
                    .unwrap();
                let mut inodes = inodes.lock().await;
                for event in items.events {
                    match &event.content {
@@ -137,31 +182,35 @@ impl fuser::Filesystem for Filesystem {
                            let mut vivify_tags = |kind: DirKind, parent: u64| -> Vec<u64> {
                                fetch_tags(&mut inodes, &normalized, kind, parent, true)
                            };

                            let parents: Vec<u64> = [dir_all_name, dir_all_ref].into_iter()

                            let parents: Vec<u64> = [dir_all_name, dir_all_ref]
                                .into_iter()
                                .chain(vivify_tags(DirKind::Default, dir_tag_ref).into_iter())
                                .chain(vivify_tags(DirKind::ShowNames, dir_tag_name).into_iter())
                                .collect();
                            inodes.create(GraphItem::Event(Box::new(event)), parents);
                        },
                        }
                        EventContent::LocalTag(content) => {
                            let tags = HashSet::from_iter(content.tags.iter().cloned());
                            let normalized = normalize_tags(&tags);
                            let mut vivify_tags = |kind: DirKind, parent: u64| -> Vec<u64> {
                                fetch_tags(&mut inodes, &normalized, kind, parent, true)
                            };

                            let parents: Vec<u64> = [dir_all_name, dir_all_ref].into_iter()

                            let parents: Vec<u64> = [dir_all_name, dir_all_ref]
                                .into_iter()
                                .chain(vivify_tags(DirKind::Default, dir_tag_ref).into_iter())
                                .chain(vivify_tags(DirKind::ShowNames, dir_tag_name).into_iter())
                                .collect();

                            let targets: Vec<&ItemRef> = event.relations.keys().collect();
                            let matched: Vec<_> = inodes.read_all().iter()
                            let matched: Vec<_> = inodes
                                .read_all()
                                .iter()
                                .filter_map(|(ino, entry)| match &entry.item {
                                    GraphItem::Event(event) => {
                                        targets.contains(&&event.id).then_some(*ino)
                                    },
                                    }
                                    _ => None,
                                })
                                .collect();
@@ -169,19 +218,23 @@ impl fuser::Filesystem for Filesystem {
                            for ino in matched {
                                inodes.retarget(ino, parents.clone());
                            }
                        },
                        }
                        _ => (),
                    }
                }
                if let Some(relations) = items.relations {
                    for source in relations.values() {
                        if source.content.get_type() != "x.redact" { continue; }
                        if source.content.get_type() != "x.redact" {
                            continue;
                        }
                        let targets: Vec<&ItemRef> = source.relations.keys().collect();
                        let matched: Vec<_> = inodes.read_all().iter()
                        let matched: Vec<_> = inodes
                            .read_all()
                            .iter()
                            .filter_map(|(ino, entry)| match &entry.item {
                                GraphItem::Event(event) => {
                                    targets.contains(&&event.id).then_some(*ino)
                                },
                                }
                                _ => None,
                            })
                            .collect();
@@ -189,8 +242,9 @@ impl fuser::Filesystem for Filesystem {
                        inodes.remove(ino);
                    }
                }

                let to_remove: Vec<u64> = [].into_iter()

                let to_remove: Vec<u64> = []
                    .into_iter()
                    .chain(inodes.read_dir(dir_tag_ref).unwrap().into_iter())
                    .chain(inodes.read_dir(dir_tag_name).unwrap().into_iter())
                    .filter_map(|(ino, entry)| match &entry.item {
@@ -206,8 +260,14 @@ impl fuser::Filesystem for Filesystem {
                after = items.next;
            }
        }

        fn fetch_tags(inodes: &mut Graph, names: &Vec<String>, kind: DirKind, parent: u64, create: bool) -> Vec<u64> {

        fn fetch_tags(
            inodes: &mut Graph,
            names: &Vec<String>,
            kind: DirKind,
            parent: u64,
            create: bool,
        ) -> Vec<u64> {
            let entries = inodes.read_dir(parent).unwrap();
            enum Ops<'a> {
                Use(u64),
@@ -215,7 +275,9 @@ impl fuser::Filesystem for Filesystem {
            }
            let mut ops = Vec::new();
            for name in names {
                if let Some(ino) = entries.iter().find_map(|(ino, entry)| (&entry.item.get_name(kind) == name).then_some(ino)) {
                if let Some(ino) = entries.iter().find_map(|(ino, entry)| {
                    (&entry.item.get_name(kind) == name).then_some(ino)
                }) {
                    ops.push(Ops::Use(*ino));
                } else if create {
                    ops.push(Ops::Create(name));
@@ -225,33 +287,46 @@ impl fuser::Filesystem for Filesystem {
            for op in ops {
                let inode = match op {
                    Ops::Use(ino) => ino,
                    Ops::Create(name) => inodes.create(GraphItem::new_dir(name, kind), vec![parent]),
                    Ops::Create(name) => {
                        inodes.create(GraphItem::new_dir(name, kind), vec![parent])
                    }
                };
                tags.push(inode);
            }
            tags
        }
        });

        self.poller = Some(handle);

        Ok(())
    }

    fn destroy(&mut self) {
        self.poller.as_ref().unwrap().abort();
    }

    fn lookup(&mut self, req: &fuser::Request<'_>, parent: u64, name: &std::ffi::OsStr, reply: fuser::ReplyEntry) {

    fn lookup(
        &mut self,
        req: &fuser::Request<'_>,
        parent: u64,
        name: &std::ffi::OsStr,
        reply: fuser::ReplyEntry,
    ) {
        let Some(name) = name.to_str() else {
            return reply.error(1);
        };

        let lock = self.inodes.blocking_lock();
        let entry = lock.get(parent).unwrap();
        let GraphItem::Directory(dir) = &entry.item else { panic!("parent is not a directory") };
        let GraphItem::Directory(dir) = &entry.item else {
            panic!("parent is not a directory")
        };
        let entries = lock.read_dir(parent).unwrap();
        let Some(inode) = entries.iter().find_map(|(ino, entry)| (entry.item.get_name(dir.kind) == name).then_some(*ino)) else {
        let Some(inode) = entries
            .iter()
            .find_map(|(ino, entry)| (entry.item.get_name(dir.kind) == name).then_some(*ino))
        else {
            return reply.error(2);
        };
@@ -262,38 +337,49 @@ impl fuser::Filesystem for Filesystem {
    }

    fn read(
        &mut self,
        _req: &fuser::Request<'_>,
        ino: u64,
        _fh: u64,
        offset: i64,
        size: u32,
        _flags: i32,
        _lock_owner: Option<u64>,
        reply: fuser::ReplyData,
    ) {
        &mut self,
        _req: &fuser::Request<'_>,
        ino: u64,
        _fh: u64,
        offset: i64,
        size: u32,
        _flags: i32,
        _lock_owner: Option<u64>,
        reply: fuser::ReplyData,
    ) {
        let offset = offset as usize;
        let size = size as usize;

        let inodes = self.inodes.blocking_lock();
        match &inodes.get(ino).unwrap().item {
            GraphItem::Directory(_) => panic!(),
            GraphItem::Text { content, .. } => reply.data(&content.as_bytes()[(offset).max(0)..(offset + size).min(content.len())]),
            GraphItem::Text { content, .. } => {
                reply.data(&content.as_bytes()[(offset).max(0)..(offset + size).min(content.len())])
            }
            GraphItem::Event(event) => {
                let Ok(buffer) = self.rt.block_on(get_blob(self, event)) else {
                    return reply.error(1);
                };

                reply.data(&buffer[(offset).max(0)..(offset + size).min(buffer.len())]);
            },
        }
        };
    }

    fn readdir(&mut self, _req: &fuser::Request<'_>, ino: u64, _fh: u64, offset: i64, mut reply: fuser::ReplyDirectory) {
    fn readdir(
        &mut self,
        _req: &fuser::Request<'_>,
        ino: u64,
        _fh: u64,
        offset: i64,
        mut reply: fuser::ReplyDirectory,
    ) {
        let inodes = self.inodes.blocking_lock();
        let GraphItem::Directory(dir) = &inodes.get(ino).unwrap().item else { panic!() };
        let GraphItem::Directory(dir) = &inodes.get(ino).unwrap().item else {
            panic!()
        };
        let entries = inodes.read_dir(ino).unwrap();

        let start = offset as usize;
        let end = (start + 100).min(entries.len());
        for (idx, (inode, entry)) in entries[start..end].iter().enumerate() {
@@ -301,16 +387,30 @@ impl fuser::Filesystem for Filesystem {
                GraphItem::Directory(_) => fuser::FileType::Directory,
                _ => fuser::FileType::RegularFile,
            };
            let _ = reply.add(*inode, (start + idx + 1) as i64, file_type, entry.item.get_name(dir.kind));
            let _ = reply.add(
                *inode,
                (start + idx + 1) as i64,
                file_type,
                entry.item.get_name(dir.kind),
            );
        }

        reply.ok();
    }

    fn opendir(&mut self, _req: &fuser::Request<'_>, ino: u64, _flags: i32, reply: fuser::ReplyOpen) {

    fn opendir(
        &mut self,
        _req: &fuser::Request<'_>,
        ino: u64,
        _flags: i32,
        reply: fuser::ReplyOpen,
    ) {
        let inodes = self.inodes.blocking_lock();
        match inodes.get(ino) {
            Some(GraphEntry { item: GraphItem::Directory(_), .. }) => reply.opened(0, 0),
            Some(GraphEntry {
                item: GraphItem::Directory(_),
                ..
            }) => reply.opened(0, 0),
            Some(_) => reply.error(1),
            None => reply.error(2),
        }
@@ -331,7 +431,7 @@ fn to_time(time: u64) -> SystemTime {
async fn get_blob(fs: &Filesystem, event: &Event) -> Result<Vec<u8>, ()> {
    static CACHE: OnceCell<Mutex<LruCache<ItemRef, bytes::Bytes>>> = OnceCell::new();
    let cache = CACHE.get_or_init(|| Mutex::new(LruCache::new(NonZeroUsize::new(100).unwrap())));

    let EventContent::File(file) = &event.content else {
        return Err(());
    };
@@ -343,14 +443,14 @@ async fn get_blob(fs: &Filesystem, event: &Event) -> Result<Vec<u8>, ()> {
            Some(blob) => {
                lock.promote(item_ref);
                blob
            },
            }
            None => {
                let Ok(Item::Blob(blob)) = fs.client.get(item_ref).await else {
                    return Err(());
                };
                lock.put(item_ref.clone(), blob.clone());
                blob
            },
            }
        };
        chunks.push(blob);
    }
@@ -11,7 +11,7 @@ impl Poller<'_> {
    pub fn new<'a>(tree: &'a mut Tree) -> Poller<'a> {
        Poller { tree, task: None }
    }

    pub fn start(&mut self) {
        self.stop();
        let query = Query {
@@ -30,7 +30,7 @@ impl Poller<'_> {
            inode
        };
        }

    pub fn stop(&self) {
        self.task.take().map(|task| task.abort());
    }
@@ -41,17 +41,21 @@ impl Graph {
        let root = GraphEntry {
            inode: 1,
            parents: Vec::new(),
            item: GraphItem::new_dir("root", DirKind::Default)
            item: GraphItem::new_dir("root", DirKind::Default),
        };
        Graph {
            counter: 2,
            items: HashMap::from([(1, root)]),
        }
    }

    pub fn create(&mut self, item: GraphItem, parents: Vec<Inode>) -> Inode {
        let inode = self.counter;
        let entry = GraphEntry { inode, parents: parents.clone(), item };
        let entry = GraphEntry {
            inode,
            parents: parents.clone(),
            item,
        };
        self.items.insert(inode, entry);
        for parent in parents {
            self.get_mut(parent)
@@ -72,8 +76,16 @@ impl Graph {
            }
        }
        for parent in parents.iter() {
            if let GraphEntry { item: GraphItem::Directory(dir), .. } = self.get_mut(*parent).expect("parent doesnt exist?") {
                let pos = dir.entries.iter().position(|ino| *ino == inode).expect("already removed or not a parent?");
            if let GraphEntry {
                item: GraphItem::Directory(dir),
                ..
            } = self.get_mut(*parent).expect("parent doesnt exist?")
            {
                let pos = dir
                    .entries
                    .iter()
                    .position(|ino| *ino == inode)
                    .expect("already removed or not a parent?");
                dir.entries.remove(pos);
            }
        }
@@ -90,7 +102,11 @@ impl Graph {
            entries.retain(|ino| *ino != inode);
        }
        for parent in &parents {
            self.get_mut(*parent).unwrap().as_mut_entries().unwrap().push(inode);
            self.get_mut(*parent)
                .unwrap()
                .as_mut_entries()
                .unwrap()
                .push(inode);
        }
        self.get_mut(inode).unwrap().parents = parents;
    }
@@ -98,22 +114,31 @@ impl Graph {
    pub fn get(&self, inode: Inode) -> Option<&GraphEntry> {
        self.items.get(&inode)
    }

    fn get_mut(&mut self, inode: Inode) -> Option<&mut GraphEntry> {
        self.items.get_mut(&inode)
    }

    pub fn read_dir(&self, inode: Inode) -> Option<Vec<(Inode, &GraphEntry)>> {
        self.items.get(&inode)
        self.items
            .get(&inode)
            .and_then(|entry| match &entry.item {
                GraphItem::Directory(dir) => Some(&dir.entries),
                _ => None,
            })
            .map(|inodes| inodes.iter().map(|i| (*i, self.items.get(i).unwrap())).collect())
            .map(|inodes| {
                inodes
                    .iter()
                    .map(|i| (*i, self.items.get(i).unwrap()))
                    .collect()
            })
    }

    pub fn read_all(&self) -> Vec<(Inode, &GraphEntry)> {
        self.items.iter().map(|(ino, entry)| (*ino, entry)).collect()
        self.items
            .iter()
            .map(|(ino, entry)| (*ino, entry))
            .collect()
    }
}
@@ -134,21 +159,21 @@ impl GraphItem {
            entries: Vec::new(),
        })
    }

    pub fn get_name(&self, in_dir: DirKind) -> String {
        match self {
            GraphItem::Directory(dir) => dir.name.to_string(),
            GraphItem::Text { name, .. } => name.to_string(),
            GraphItem::Directory(dir) => dir.name.to_string(),
            GraphItem::Text { name, .. } => name.to_string(),
            GraphItem::Event(event) => {
                let EventContent::File(file) = &event.content else {
                    panic!();
                };

                match in_dir {
                    DirKind::Default => event.id.to_string(),
                    DirKind::ShowNames => file.name.as_deref().unwrap_or("unnamed").to_string(),
                }
            },
        }
    }
}
}
cli/src/main.rs (347 changed lines)
@@ -3,17 +3,28 @@
// TODO: proper error handling

mod cli;
mod net;
mod fuse;
mod net;

use std::{collections::{HashMap, HashSet}, io::{Write, IsTerminal}, fmt::Display, time::Duration, hash::Hash};
use bytes::Bytes;
use cli::{Command, Action, ShareAction, FileAction, EventAction};
use clap::Parser;
use serde::{Serialize, Deserialize};
use net::Item;
use ufh::{query::Query, actor::{ActorSecret, ActorId}, event::{WipEvent, EventContent, TagEvent, RedactEvent, RelInfo, FileEvent}, item::{ItemRef, HashType}};
use cli::{Action, Command, EventAction, FileAction, ShareAction};
use colored::Colorize;
use net::Item;
use serde::{Deserialize, Serialize};
use std::{
    collections::{HashMap, HashSet},
    fmt::Display,
    hash::Hash,
    io::{IsTerminal, Write},
    time::Duration,
};
use ufh::{
    actor::{ActorId, ActorSecret},
    event::{EventContent, FileEvent, RedactEvent, RelInfo, TagEvent, WipEvent},
    item::{HashType, ItemRef},
    query::Query,
};

use crate::net::PutItem;
@@ -53,28 +64,35 @@ impl Display for DynError {
impl std::error::Error for StaticError {}
impl std::error::Error for DynError {}

fn get_or_init_config(file: Option<&std::path::Path>, profile: Option<&str>) -> Result<ConfigProfile, Error> {
    let path = file
        .map(|f| f.to_path_buf())
        .unwrap_or_else(|| dirs::config_dir()
fn get_or_init_config(
    file: Option<&std::path::Path>,
    profile: Option<&str>,
) -> Result<ConfigProfile, Error> {
    let path = file.map(|f| f.to_path_buf()).unwrap_or_else(|| {
        dirs::config_dir()
            .expect("should have config dir")
            .join("ufh/config.json"));
            .join("ufh/config.json")
    });
    let mut config: Config = if std::fs::try_exists(&path)? {
        serde_json::from_slice(&std::fs::read(&path)?)?
    } else {
        let (_, key) = ActorId::new();
        let config = Config {
            profiles: HashMap::from([("default".to_owned(), ConfigProfile {
                base_url: "http://localhost:3210/".to_owned(),
                key,
                token: String::from("changeme"),
            })]),
            profiles: HashMap::from([(
                "default".to_owned(),
                ConfigProfile {
                    base_url: "http://localhost:3210/".to_owned(),
                    key,
                    token: String::from("changeme"),
                },
            )]),
            default: "default".to_owned(),
        };
        std::fs::write(&path, serde_json::to_string(&config)?)?;
        Err(StaticError("please set your token in config"))?
    };
    let profile = config.profiles
    let profile = config
        .profiles
        .remove(profile.unwrap_or(&config.default))
        .ok_or(StaticError("couldnt find that profile"))?;
    Ok(profile)
@@ -86,7 +104,9 @@ async fn main() -> Result<(), Error> {
    let config = get_or_init_config(args.config.as_deref(), args.profile.as_deref())?;
    let client = net::Client::new(config.base_url, config.token);

    let make_wip_event = |content: EventContent, relations: Option<HashMap<ItemRef, RelInfo>>| -> Result<WipEvent, Error> {
    let make_wip_event = |content: EventContent,
                          relations: Option<HashMap<ItemRef, RelInfo>>|
     -> Result<WipEvent, Error> {
        let mut wip = WipEvent::new(content, &config.key.get_id());
        wip.relations = relations;
        wip.signature = Some(config.key.sign(wip.to_json().as_bytes()));
@@ -94,9 +114,17 @@ async fn main() -> Result<(), Error> {
    };

    let make_relations = |refs: Vec<ItemRef>, rel_type: &str| {
        Some(HashMap::from_iter(refs.into_iter().map(|i| (i, RelInfo { rel_type: rel_type.to_string(), key: None }))))
        Some(HashMap::from_iter(refs.into_iter().map(|i| {
            (
                i,
                RelInfo {
                    rel_type: rel_type.to_string(),
                    key: None,
                },
            )
        })))
    };

    match args.action {
        Action::File { action } => match action {
            FileAction::Info { item_ref } => {
@@ -105,17 +133,21 @@ async fn main() -> Result<(), Error> {
                    Item::Event(event) => match &event.content {
                        EventContent::File(FileEvent { .. }) => {
                            println!("{}", serde_json::to_string(&event)?);
                        },
                        }
                        _ => println!("not a file (is event of {})", event.content.get_type()),
                    },
                };
            },
            }
            FileAction::List { tags, long, stream } => {
                let query = Query {
                    refs: None,
                    senders: Some(HashSet::from([config.key.get_id()])),
                    types: Some(HashSet::from(["x.file".into()])),
                    tags: if tags.is_empty() { None } else { Some(into_hashset(tags)) },
                    tags: if tags.is_empty() {
                        None
                    } else {
                        Some(into_hashset(tags))
                    },
                    relations: HashSet::new(),
                    with_redacts: false,
                };
@@ -123,7 +155,7 @@ async fn main() -> Result<(), Error> {
                let timeout = if stream { Some(30000) } else { None };
                let mut after = None;
                if long {
                    println!("{:63} {:24} {:20} {}", "ref", "type", "date", "sender");
                    println!("{:63} {:24} {:20} {}", "ref", "type", "date", "sender");
                }
                loop {
                    let items = client.list(&query, None, after, timeout).await?;
@@ -132,26 +164,39 @@ async fn main() -> Result<(), Error> {
                            continue;
                        };
                        if long {
                            let time = std::time::SystemTime::UNIX_EPOCH + Duration::from_millis(event.origin_ts);
                            println!("{} {:24} {} {}",
                            let time = std::time::SystemTime::UNIX_EPOCH
                                + Duration::from_millis(event.origin_ts);
                            println!(
                                "{} {:24} {} {}",
                                &event.id,
                                file.name.as_deref().map(|name| name.blue()).unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time).to_string().magenta(),
                                file.name
                                    .as_deref()
                                    .map(|name| name.blue())
                                    .unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time)
                                    .to_string()
                                    .magenta(),
                                event.sender.to_string().green(),
                            );
                        } else {
                            println!("{}", &event.id);
                        }
                    };
                    }
                    if items.next.is_none() {
                        break;
                    }
                    after = items.next;
                }
            },
            FileAction::Put { tags, files, name, wait, direct } => {
            }
            FileAction::Put {
                tags,
                files,
                name,
                wait,
                direct,
            } => {
                let mut item_refs = Vec::new();

                for file in files {
                    let buffer = std::fs::read(&file)?;
                    let refs = if direct {
@@ -181,7 +226,7 @@ async fn main() -> Result<(), Error> {
                        }
                        let ref_set: Vec<_> = chunks.iter().map(|i| i.0.clone()).collect();
                        let have = client.check(&ref_set).await?;

                        for (item_ref, chunk) in chunks {
                            if have.contains(&item_ref) {
                                println!("reuse {item_ref} ({} bytes)", chunk.len());
@@ -196,28 +241,29 @@ async fn main() -> Result<(), Error> {
                        }
                        refs
                    };

                    let content = EventContent::File(FileEvent {
                        chunks: refs,
                        name: name.as_deref()
                        name: name
                            .as_deref()
                            .or(file.file_name().and_then(|i| i.to_str()))
                            .map(ToOwned::to_owned),
                    });

                    let wip = make_wip_event(content, None)?;
                    let item_ref = client.put(PutItem::WipEvent(&wip), wait).await?;
                    println!("fully uploaded to: {item_ref}");

                    item_refs.push(item_ref);
                }

                if !tags.is_empty() {
                    let content = EventContent::LocalTag(TagEvent { tags });
                    let wip = make_wip_event(content, make_relations(item_refs, "tag"))?;
                    let tag_ref = client.put(PutItem::WipEvent(&wip), true).await?;
                    println!("tagged with: {tag_ref}");
                }
            },
            }
            FileAction::Cat { refs, force } => {
                let stdout = std::io::stdout().lock();
                let is_term = stdout.is_terminal();
@@ -233,29 +279,41 @@ async fn main() -> Result<(), Error> {
                        return Err(StaticError("not a blob"))?;
                    };
                    if is_term && !force && std::str::from_utf8(&blob).is_err() {
                        return Err(StaticError("refusing to output binary data to tty, use `-f` to force"))?;
                        return Err(StaticError(
                            "refusing to output binary data to tty, use `-f` to force",
                        ))?;
                    }
                    std::io::stdout().write_all(&blob)?;
                }
            }
            },
            }
            FileAction::Tag { tags, refs } => {
                let content = EventContent::LocalTag(TagEvent { tags });
                let wip = make_wip_event(content, make_relations(refs, "tag"))?;
                let tag_ref = client.put(PutItem::WipEvent(&wip), true).await?;
                println!("tagged with: {tag_ref}");
            },
            }
            FileAction::Redact { refs } => {
                let content = EventContent::Redact(RedactEvent {});
                let wip = make_wip_event(content, make_relations(refs, "redact"))?;
                let redact_ref = client.put(PutItem::WipEvent(&wip), true).await?;
                println!("redacted with: {redact_ref}");
            },
            FileAction::Search { query, long, snippets } => {
            }
            FileAction::Search {
                query,
                long,
                snippets,
            } => {
                let search = client.search(&query).await?;
                if long {
                    if snippets {
                        println!(" {:63} {:24} {:20} {:44}", "ref".bold(), "name".bold(), "date".bold(), "sender".bold());
                        println!(
                            " {:63} {:24} {:20} {:44}",
                            "ref".bold(),
                            "name".bold(),
                            "date".bold(),
                            "sender".bold()
                        );
                    } else {
                        println!("{:63} {:24} {:20} {:44}", "ref", "name", "date", "sender");
                    }
@@ -265,14 +323,21 @@ async fn main() -> Result<(), Error> {
                    let EventContent::File(file) = &event.content else {
                        continue;
                    };
                    let time = std::time::SystemTime::UNIX_EPOCH + Duration::from_millis(event.origin_ts);
                    let time =
                        std::time::SystemTime::UNIX_EPOCH + Duration::from_millis(event.origin_ts);
                    match (long, snippets) {
                        (true, true) => {
                            print!("{} {} {:24} {} {}",
                            print!(
                                "{} {} {:24} {} {}",
                                "┏".dimmed(),
                                &event.id,
                                file.name.as_deref().map(|name| name.blue()).unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time).to_string().magenta(),
                                file.name
                                    .as_deref()
                                    .map(|name| name.blue())
                                    .unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time)
                                    .to_string()
                                    .magenta(),
                                event.sender.to_string().green(),
                            );
                            if let Some(snippet) = result.snippet {
@@ -291,19 +356,26 @@ async fn main() -> Result<(), Error> {
                                print!("\n{} ", "┃ no snippet".dimmed());
                            }
                            println!("\n{}", "┗━━━━━━━━━━━━━━━━".dimmed());
                        },
                        }
                        (true, false) => {
                            println!("{} {:24} {} {}",
                            println!(
                                "{} {:24} {} {}",
                                &event.id,
                                file.name.as_deref().map(|name| name.blue()).unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time).to_string().magenta(),
                                file.name
                                    .as_deref()
                                    .map(|name| name.blue())
                                    .unwrap_or("unnamed".dimmed()),
                                humantime::format_rfc3339_seconds(time)
                                    .to_string()
                                    .magenta(),
                                event.sender.to_string().green(),
                            );
                        },
                        }
                        (false, true) => {
                            print!("{}: ", event.id.to_string());
                            if let Some(snippet) = result.snippet {
                                let line = snippet.get_lines()
                                let line = snippet
                                    .get_lines()
                                    .into_iter()
                                    .find(|line| line.iter().any(|(_, bold)| *bold));
                                if let Some(line) = line {
@@ -321,21 +393,28 @@ async fn main() -> Result<(), Error> {
                            } else {
                                println!("{}", "no snippet".dimmed());
                            }
                        },
                        }
                        (false, false) => println!("{}", &event.id),
                    };
                }
            },
            }
            FileAction::Mount { path, daemon } => {
                println!("{}: fuse support is {} experimental!", "WARNING".yellow(), "very".italic());
                println!(
                    "{}: fuse support is {} experimental!",
                    "WARNING".yellow(),
                    "very".italic()
                );
                let handle = tokio::runtime::Handle::current();
                let fs = fuse::Filesystem::new(handle, client, config.key.clone());
                if daemon {
                    todo!("not implemented yet");
                } else {
                    let handle = std::thread::spawn(move || fs.spawn_mount(&path)).join().unwrap().unwrap();
                    let handle = std::thread::spawn(move || fs.spawn_mount(&path))
                        .join()
                        .unwrap()
                        .unwrap();
                    let (send, recv) = std::sync::mpsc::channel();

                    ctrlc::set_handler(move || {
                        println!("goodbye!");
                        send.send(()).unwrap();
@@ -345,7 +424,7 @@ async fn main() -> Result<(), Error> {
                    let () = recv.recv().unwrap();
                    drop(handle);
                }
            },
            }
        },
        Action::Event { action } => match action {
            EventAction::Info { rels, item_ref } => {
@@ -357,7 +436,10 @@ async fn main() -> Result<(), Error> {
                } else {
                    let relations: Option<HashSet<_, _>> = rels
                        .into_iter()
                        .map(|i| i.split_once('/').map(|i| (i.0.to_string(), i.1.to_string())))
                        .map(|i| {
                            i.split_once('/')
                                .map(|i| (i.0.to_string(), i.1.to_string()))
                        })
                        .collect();
                    let query = Query {
                        refs: Some(HashSet::from([item_ref])),
@@ -376,13 +458,26 @@ async fn main() -> Result<(), Error> {
                    println!("{}", serde_json::to_string(&event)?);
                }
            }
            },
            EventAction::List { types, tags, long, stream } => {
            }
            EventAction::List {
                types,
                tags,
                long,
                stream,
            } => {
                let query = Query {
                    refs: None,
                    senders: Some(HashSet::from([config.key.get_id()])),
                    types: if types.is_empty() { None } else { Some(into_hashset(types)) },
                    tags: if tags.is_empty() { None } else { Some(into_hashset(tags)) },
                    types: if types.is_empty() {
                        None
                    } else {
                        Some(into_hashset(types))
                    },
                    tags: if tags.is_empty() {
                        None
                    } else {
                        Some(into_hashset(tags))
                    },
                    relations: HashSet::new(),
                    with_redacts: false,
                };
@@ -390,94 +485,118 @@ async fn main() -> Result<(), Error> {
                let timeout = if stream { Some(30000) } else { None };
                let mut after = None;
                if long {
                    println!("{:63} {:12} {:20} {}", "ref", "type", "date", "sender");
                    println!("{:63} {:12} {:20} {}", "ref", "type", "date", "sender");
                }
                loop {
                    let items = client.list(&query, None, after, timeout).await?;
                    for event in items.events {
                        if long {
                            let time = std::time::SystemTime::UNIX_EPOCH + Duration::from_millis(event.origin_ts);
                            println!("{} {:12} {} {}",
                            let time = std::time::SystemTime::UNIX_EPOCH
                                + Duration::from_millis(event.origin_ts);
                            println!(
                                "{} {:12} {} {}",
                                &event.id,
                                event.content.get_type().blue(),
                                humantime::format_rfc3339_seconds(time).to_string().magenta(),
                                humantime::format_rfc3339_seconds(time)
                                    .to_string()
                                    .magenta(),
                                event.sender.to_string().green(),
                            );
                        } else {
                            println!("{}", &event.id);
                        }
                    };
                    }
                    if items.next.is_none() {
                        break;
                    }
                    after = items.next;
                }
            },
            EventAction::Create { rels, event_type, content, wait } => {
            }
            EventAction::Create {
                rels,
                event_type,
                content,
                wait,
            } => {
                let content = match event_type.as_str() {
                    "x.file" => EventContent::File(serde_json::from_str(&content)?),
                    "x.tag" => EventContent::Tag(serde_json::from_str(&content)?),
                    "x.tag.local" => EventContent::LocalTag(serde_json::from_str(&content)?),
                    "x.annotate" => EventContent::Annotate(serde_json::from_str(&content)?),
                    "x.annotate.local" => EventContent::LocalAnnotate(serde_json::from_str(&content)?),
                    "x.user" => EventContent::User(serde_json::from_str(&content)?),
                    "x.acl" => EventContent::Acl(serde_json::from_str(&content)?),
                    "x.redact" => EventContent::Redact(serde_json::from_str(&content)?),
                    _ => EventContent::Other { event_type, content: serde_json::from_str(&content)? },
                    "x.file" => EventContent::File(serde_json::from_str(&content)?),
                    "x.tag" => EventContent::Tag(serde_json::from_str(&content)?),
                    "x.tag.local" => EventContent::LocalTag(serde_json::from_str(&content)?),
                    "x.annotate" => EventContent::Annotate(serde_json::from_str(&content)?),
                    "x.annotate.local" => {
                        EventContent::LocalAnnotate(serde_json::from_str(&content)?)
                    }
                    "x.user" => EventContent::User(serde_json::from_str(&content)?),
                    "x.acl" => EventContent::Acl(serde_json::from_str(&content)?),
                    "x.redact" => EventContent::Redact(serde_json::from_str(&content)?),
                    _ => EventContent::Other {
                        event_type,
                        content: serde_json::from_str(&content)?,
                    },
                };

                let relations: Option<HashMap<ItemRef, RelInfo>> = rels
                    .into_iter()
                    .map(|i| i
                        .split_once('/')
                        .map(|i| (i.0.parse().expect("invalid item ref"), RelInfo { rel_type: i.1.to_string(), key: None })))
                    .map(|i| {
                        i.split_once('/').map(|i| {
                            (
                                i.0.parse().expect("invalid item ref"),
                                RelInfo {
                                    rel_type: i.1.to_string(),
                                    key: None,
                                },
                            )
                        })
                    })
                    .collect();

                let wip = make_wip_event(content, relations)?;
                let item_ref = client.put(PutItem::WipEvent(&wip), wait).await?;
                println!("fully uploaded to: {item_ref}");
            },
            }
            EventAction::Tag { tags, refs } => {
                let content = EventContent::LocalTag(TagEvent { tags });
                let wip = make_wip_event(content, make_relations(refs, "tag"))?;
                let tag_ref = client.put(PutItem::WipEvent(&wip), true).await?;
                println!("tagged with: {tag_ref}");
            },
            }
            EventAction::Redact { refs } => {
                let content = EventContent::Redact(RedactEvent {});
                let wip = make_wip_event(content, make_relations(refs, "redact"))?;
                let redact_ref = client.put(PutItem::WipEvent(&wip), true).await?;
                println!("redacted with: {redact_ref}");
            },
        },
        Action::Share { action } => {
            match action {
                ShareAction::Create { name, item_ref, expires } => {
                    let now = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH)?;
                    let time = expires.map(|i| Into::<Duration>::into(i) + now);
                    let share = client.share_create(&item_ref, name.as_deref(), time.as_ref()).await?;
                    println!("shared to: {share}");
                },
                ShareAction::Info { share } => {
                    let info = client.share_get(&share).await?;
                    println!("{}", serde_json::to_string(&info)?);
                },
                ShareAction::Remove { share } => {
                    client.share_delete(&share).await?;
                    println!("removed share");
                },
            }
        },
        Action::Key { action } => {
            match action {
                cli::KeyAction::ExportForWeb => {
                    let secret = config.key.extract_secret();
                    let secret_bytes = Bytes::copy_from_slice(&secret);
                    println!("id: {}", config.key.get_id());
                    println!("secret: {:x}", secret_bytes);
                },
        Action::Share { action } => match action {
            ShareAction::Create {
                name,
                item_ref,
                expires,
            } => {
                let now = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH)?;
                let time = expires.map(|i| Into::<Duration>::into(i) + now);
                let share = client
                    .share_create(&item_ref, name.as_deref(), time.as_ref())
                    .await?;
                println!("shared to: {share}");
            }
            }
            ShareAction::Info { share } => {
                let info = client.share_get(&share).await?;
                println!("{}", serde_json::to_string(&info)?);
            }
            ShareAction::Remove { share } => {
                client.share_delete(&share).await?;
                println!("removed share");
            }
        },
        Action::Key { action } => match action {
            cli::KeyAction::ExportForWeb => {
                let secret = config.key.extract_secret();
                let secret_bytes = Bytes::copy_from_slice(&secret);
                println!("id: {}", config.key.get_id());
                println!("secret: {:x}", secret_bytes);
            }
        },
    };
    Ok(())
}
cli/src/net.rs (104 changed lines)
@@ -1,11 +1,15 @@
use reqwest::{Client as HttpClient, StatusCode};
use std::{collections::{HashMap, HashSet}, time::Duration, ops::Range};
use ufh::{item::ItemRef, event::WipEvent};
use crate::{DynError, Error, StaticError};
use bytes::Bytes;
use ufh::event::Event;
use serde::{Serialize, Deserialize};
use crate::{Error, StaticError, DynError};
use reqwest::{Client as HttpClient, StatusCode};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::{
    collections::{HashMap, HashSet},
    ops::Range,
    time::Duration,
};
use ufh::event::Event;
use ufh::{event::WipEvent, item::ItemRef};

// TODO: more helpful errors
// TODO: urlencode
@@ -91,7 +95,7 @@ impl Client {
            token,
        }
    }

    pub async fn put(&self, item: PutItem<'_>, wait: bool) -> Result<ItemRef, Error> {
        let buffer = match item {
            PutItem::WipEvent(event) => Bytes::from(json_canon::to_string(event)?),
@@ -101,7 +105,8 @@ impl Client {
            PutItem::WipEvent(_) => "application/json",
            PutItem::Blob(_) => "application/octet-stream",
        };
        let res = self.http
        let res = self
            .http
            .post(format!("{}things?wait={}", self.base_url, wait))
            .header("content-length", buffer.len())
            .header("content-type", content_type)
@@ -112,26 +117,21 @@ impl Client {
        if !res.status().is_success() {
            return Err(DynError(res.text().await?))?;
        }
        let json: UploadResponse = res
            .json()
            .await?;
        let json: UploadResponse = res.json().await?;
        Ok(json.item_ref)
    }

    pub async fn get(&self, item: &ItemRef) -> Result<Item, Error> {
        let req = self.http
        let req = self
            .http
            .get(format!("{}things/{}", self.base_url, item))
            .header("authorization", format!("Bearer {}", self.token))
            .send()
            .await?
            .error_for_status()?;
        match req.headers().get("content-type").map(|i| i.to_str()) {
            Some(Ok("application/json")) => {
                Ok(Item::Event(req.json().await?))
            },
            Some(Ok("application/octet-stream")) => {
                Ok(Item::Blob(req.bytes().await?))
            },
            Some(Ok("application/json")) => Ok(Item::Event(req.json().await?)),
            Some(Ok("application/octet-stream")) => Ok(Item::Blob(req.bytes().await?)),
            Some(Ok(_)) => unreachable!(),
            Some(Err(err)) => Err(Box::new(err)),
            _ => Err(StaticError("something went wrong"))?,
@@ -139,7 +139,8 @@ impl Client {
    }

    pub async fn query(&self, query: &ufh::query::Query) -> Result<String, Error> {
        let res = self.http
        let res = self
            .http
            .post(format!("{}things/query", self.base_url))
            .header("content-type", "application/json")
            .header("authorization", format!("Bearer {}", self.token))
@@ -149,18 +150,29 @@ impl Client {
        if res.status() != StatusCode::CREATED {
            return Err(DynError(res.text().await?))?;
        }
        let res: QueryResponse = res
            .json()
            .await?;
        let res: QueryResponse = res.json().await?;
        Ok(res.query)
    }

    pub async fn list(&self, query: &str, limit: Option<u32>, after: Option<String>, timeout: Option<u32>) -> Result<ListResponse, Error> {
    pub async fn list(
        &self,
        query: &str,
        limit: Option<u32>,
        after: Option<String>,
        timeout: Option<u32>,
    ) -> Result<ListResponse, Error> {
        let mut params = Vec::from([("query", query.to_string())]);
        if let Some(limit) = limit { params.push(("limit", limit.to_string())) };
        if let Some(after) = after { params.push(("after", after)) };
        if let Some(timeout) = timeout{ params.push(("timeout", timeout.to_string())) };
        let res = self.http
        if let Some(limit) = limit {
            params.push(("limit", limit.to_string()))
        };
        if let Some(after) = after {
            params.push(("after", after))
        };
        if let Some(timeout) = timeout {
            params.push(("timeout", timeout.to_string()))
        };
        let res = self
            .http
            .get(format!("{}things", self.base_url))
            .header("authorization", format!("Bearer {}", self.token))
            .query(&params)
@@ -172,13 +184,19 @@ impl Client {
        Ok(res.json().await?)
    }

    pub async fn share_create(&self, item_ref: &ItemRef, name: Option<&str>, expires_at: Option<&Duration>) -> Result<Share, Error> {
    pub async fn share_create(
        &self,
        item_ref: &ItemRef,
        name: Option<&str>,
        expires_at: Option<&Duration>,
    ) -> Result<Share, Error> {
        let value = json!({
            "ref": item_ref,
            "share_id": name,
            "expires_at": expires_at.map(|i| i.as_millis()),
        });
        let res: ShareCreate = self.http
        let res: ShareCreate = self
            .http
            .post(format!("{}shares", self.base_url))
            .header("content-type", "application/json")
            .header("authorization", format!("Bearer {}", self.token))
@@ -190,9 +208,10 @@ impl Client {
            .await?;
        Ok(res.share)
    }

    pub async fn share_get(&self, share: &Share) -> Result<ShareInfo, Error> {
        let res: ShareInfo = self.http
        let res: ShareInfo = self
            .http
            .get(format!("{}shares/{}", self.base_url, share))
            .header("authorization", format!("Bearer {}", self.token))
            .send()
@@ -202,7 +221,7 @@ impl Client {
            .await?;
        Ok(res)
    }

    pub async fn share_delete(&self, share: &Share) -> Result<(), Error> {
        self.http
            .delete(format!("{}shares/{}", self.base_url, share))
@@ -212,10 +231,12 @@ impl Client {
            .error_for_status()?;
        Ok(())
    }

    pub async fn search(&self, query: &str) -> Result<SearchResult, Error> {
        let url = reqwest::Url::parse_with_params(&format!("{}search", self.base_url), [("q", query)])?;
        let res: SearchResult = self.http
        let url =
            reqwest::Url::parse_with_params(&format!("{}search", self.base_url), [("q", query)])?;
        let res: SearchResult = self
            .http
            .get(url)
            .header("authorization", format!("Bearer {}", self.token))
            .send()
@@ -235,10 +256,13 @@ impl Client {
        struct Response {
            have: HashSet<ItemRef>,
        }
        let res: Response = self.http
        let res: Response = self
            .http
            .post(format!("{}things/check", self.base_url))
            .header("authorization", format!("Bearer {}", self.token))
            .json(&Request { refs: refs.to_vec() })
            .json(&Request {
                refs: refs.to_vec(),
            })
            .send()
            .await?
            .error_for_status()?
@@ -249,7 +273,7 @@ impl Client {
}

impl Snippet {
    pub fn get_lines(&self) -> Vec<Vec<(String, bool)>>{
    pub fn get_lines(&self) -> Vec<Vec<(String, bool)>> {
        let ranges = self.get_ranges();
        let mut start_at = 0;
        let mut chunks = Vec::new();
@@ -271,7 +295,7 @@ impl Snippet {
        chunks.push(current_chunk);
        chunks
    }

    // modified from https://github.com/quickwit-oss/tantivy/blob/820f126075897091bcd48ae78aebfc0e0ee87326/src/snippet/mod.rs#L203
    /// Returns ranges that are collapsed into non-overlapped ranges.
    ///
@@ -303,5 +327,5 @@ impl Snippet {

        result.push(current);
        result
    }
    }
}
lib/src/acl.rs (215 changed lines)
@@ -1,5 +1,5 @@
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use serde::{Serialize, Deserialize};

use crate::{actor::ActorId, event::Event, item::ItemRef};
@@ -24,7 +24,12 @@ type Permission = (String, String, String);
type Permissions = HashSet<Permission>;

impl Acl {
    pub fn can_send(&self, user: &ActorId, event: &Event, targets: HashMap<ItemRef, &Event>) -> bool {
    pub fn can_send(
        &self,
        user: &ActorId,
        event: &Event,
        targets: HashMap<ItemRef, &Event>,
    ) -> bool {
        if self.admins.contains(user) {
            return true;
        }
@@ -33,18 +38,13 @@ impl Acl {
            return false;
        };

        let permissions: Option<Vec<&Permissions>> = roles.iter()
            .map(|r| self.roles.get(r))
            .collect();
        let permissions: Option<Vec<&Permissions>> =
            roles.iter().map(|r| self.roles.get(r)).collect();
        let Some(permissions) = permissions else {
            return false;
        };

        let permissions: Permissions = permissions
            .into_iter()
            .flatten()
            .cloned()
            .collect();

        let permissions: Permissions = permissions.into_iter().flatten().cloned().collect();

        for (from_event_type, rel_type, to_event_type) in permissions {
            if from_event_type != event.content.get_type() && from_event_type != "*" {
@@ -63,7 +63,7 @@ impl Acl {
            }
        }
    }

        false
    }
}
@@ -73,7 +73,7 @@ mod tests {
    use std::collections::HashMap;

    use super::*;
    use crate::{item::HashType, event::EventContent, derived::Derived, actor::ActorSecret};
    use crate::{actor::ActorSecret, derived::Derived, event::EventContent, item::HashType};

    // TODO: find a way to deduplicate this code a bit?
@@ -94,20 +94,32 @@ mod tests {
    #[test]
    fn test_admin() {
        let (id, secret) = ActorId::new();
        let acl = Acl { admins: HashSet::from([id.clone()]), ..Default::default() };
        let event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
        let acl = Acl {
            admins: HashSet::from([id.clone()]),
            ..Default::default()
        };
        let event = create_event(
            &secret,
            EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
        );
        assert!(acl.can_send(&id, &event, HashMap::new()));
    }

    #[test]
    fn test_not_admin() {
        let (id, secret) = ActorId::new();
        let (id2, _) = ActorId::new();
        let acl = Acl { admins: HashSet::from([id.clone()]), ..Default::default() };
        let event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
        let acl = Acl {
            admins: HashSet::from([id.clone()]),
            ..Default::default()
        };
        let event = create_event(
            &secret,
            EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
        );
        assert!(!acl.can_send(&id2, &event, HashMap::new()));
    }

    #[test]
    fn test_type() {
        let (id, secret) = ActorId::new();
@@ -119,13 +131,28 @@ mod tests {
            ..Default::default()
        };
        dbg!(&acl);
        let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
        let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
        event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
        let initial = create_event(
            &secret,
            EventContent::File(crate::event::FileEvent {
                chunks: Vec::new(),
                name: None,
            }),
        );
        let mut event = create_event(
            &secret,
            EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
        );
        event.relations.insert(
            initial.id.clone(),
            crate::event::RelInfo {
                rel_type: "tag".into(),
                key: None,
            },
        );
        let relations = HashMap::from([(initial.id.clone(), &initial)]);
        assert!(acl.can_send(&id, &event, relations));
    }

    #[test]
    fn test_wrong_from() {
        let (id, secret) = ActorId::new();
@@ -136,30 +163,64 @@ mod tests {
            users: HashMap::from([(id.clone(), roles)]),
            ..Default::default()
        };
        let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
        let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
        event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
        let initial = create_event(
            &secret,
            EventContent::File(crate::event::FileEvent {
                chunks: Vec::new(),
                name: None,
            }),
        );
        let mut event = create_event(
            &secret,
            EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
        );
        event.relations.insert(
            initial.id.clone(),
            crate::event::RelInfo {
                rel_type: "tag".into(),
                key: None,
            },
        );
        let relations = HashMap::from([(initial.id.clone(), &initial)]);
        assert!(!acl.can_send(&id, &event, relations));
    }

    #[test]
    fn test_wrong_rel() {
        let (id, secret) = ActorId::new();
        let perms = HashSet::from([("x.tag.local".into(), "somethingelse".into(), "x.file".into())]);
        let perms = HashSet::from([(
            "x.tag.local".into(),
            "somethingelse".into(),
            "x.file".into(),
        )]);
        let roles = HashSet::from(["example".into()]);
        let acl = Acl {
            roles: HashMap::from([("example".into(), perms)]),
            users: HashMap::from([(id.clone(), roles)]),
            ..Default::default()
        };
        let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
        let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
        event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
        let initial = create_event(
            &secret,
            EventContent::File(crate::event::FileEvent {
                chunks: Vec::new(),
                name: None,
            }),
        );
        let mut event = create_event(
            &secret,
            EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
        );
        event.relations.insert(
            initial.id.clone(),
            crate::event::RelInfo {
                rel_type: "tag".into(),
                key: None,
            },
        );
        let relations = HashMap::from([(initial.id.clone(), &initial)]);
        assert!(!acl.can_send(&id, &event, relations));
    }

    #[test]
    fn test_wrong_to() {
        let (id, secret) = ActorId::new();
@ -170,13 +231,28 @@ mod tests {
|
|||
users: HashMap::from([(id.clone(), roles)]),
|
||||
..Default::default()
|
||||
};
|
||||
let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
|
||||
let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
|
||||
event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
|
||||
let initial = create_event(
|
||||
&secret,
|
||||
EventContent::File(crate::event::FileEvent {
|
||||
chunks: Vec::new(),
|
||||
name: None,
|
||||
}),
|
||||
);
|
||||
let mut event = create_event(
|
||||
&secret,
|
||||
EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
|
||||
);
|
||||
event.relations.insert(
|
||||
initial.id.clone(),
|
||||
crate::event::RelInfo {
|
||||
rel_type: "tag".into(),
|
||||
key: None,
|
||||
},
|
||||
);
|
||||
let relations = HashMap::from([(initial.id.clone(), &initial)]);
|
||||
assert!(!acl.can_send(&id, &event, relations));
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_wildcard_from() {
|
||||
let (id, secret) = ActorId::new();
|
||||
|
@ -187,13 +263,28 @@ mod tests {
|
|||
users: HashMap::from([(id.clone(), roles)]),
|
||||
..Default::default()
|
||||
};
|
||||
let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
|
||||
let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
|
||||
event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
|
||||
let initial = create_event(
|
||||
&secret,
|
||||
EventContent::File(crate::event::FileEvent {
|
||||
chunks: Vec::new(),
|
||||
name: None,
|
||||
}),
|
||||
);
|
||||
let mut event = create_event(
|
||||
&secret,
|
||||
EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
|
||||
);
|
||||
event.relations.insert(
|
||||
initial.id.clone(),
|
||||
crate::event::RelInfo {
|
||||
rel_type: "tag".into(),
|
||||
key: None,
|
||||
},
|
||||
);
|
||||
let relations = HashMap::from([(initial.id.clone(), &initial)]);
|
||||
assert!(acl.can_send(&id, &event, relations));
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_wildcard_rel() {
|
||||
let (id, secret) = ActorId::new();
|
||||
|
@ -204,13 +295,28 @@ mod tests {
|
|||
users: HashMap::from([(id.clone(), roles)]),
|
||||
..Default::default()
|
||||
};
|
||||
let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
|
||||
let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
|
||||
event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
|
||||
let initial = create_event(
|
||||
&secret,
|
||||
EventContent::File(crate::event::FileEvent {
|
||||
chunks: Vec::new(),
|
||||
name: None,
|
||||
}),
|
||||
);
|
||||
let mut event = create_event(
|
||||
&secret,
|
||||
EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
|
||||
);
|
||||
event.relations.insert(
|
||||
initial.id.clone(),
|
||||
crate::event::RelInfo {
|
||||
rel_type: "tag".into(),
|
||||
key: None,
|
||||
},
|
||||
);
|
||||
let relations = HashMap::from([(initial.id.clone(), &initial)]);
|
||||
assert!(acl.can_send(&id, &event, relations));
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_wildcard_to() {
|
||||
let (id, secret) = ActorId::new();
|
||||
|
@ -221,9 +327,24 @@ mod tests {
|
|||
users: HashMap::from([(id.clone(), roles)]),
|
||||
..Default::default()
|
||||
};
|
||||
let initial = create_event(&secret, EventContent::File(crate::event::FileEvent { chunks: Vec::new(), name: None }));
|
||||
let mut event = create_event(&secret, EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }));
|
||||
event.relations.insert(initial.id.clone(), crate::event::RelInfo { rel_type: "tag".into(), key: None });
|
||||
let initial = create_event(
|
||||
&secret,
|
||||
EventContent::File(crate::event::FileEvent {
|
||||
chunks: Vec::new(),
|
||||
name: None,
|
||||
}),
|
||||
);
|
||||
let mut event = create_event(
|
||||
&secret,
|
||||
EventContent::LocalTag(crate::event::TagEvent { tags: Vec::new() }),
|
||||
);
|
||||
event.relations.insert(
|
||||
initial.id.clone(),
|
||||
crate::event::RelInfo {
|
||||
rel_type: "tag".into(),
|
||||
key: None,
|
||||
},
|
||||
);
|
||||
let relations = HashMap::from([(initial.id.clone(), &initial)]);
|
||||
assert!(acl.can_send(&id, &event, relations));
|
||||
}
|
||||
|
139 lib/src/actor.rs
@ -1,9 +1,13 @@
use std::{str::FromStr, fmt::{Display, Debug}, ops::Deref};
use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD as b64engine;
use thiserror::Error;
use base64::Engine as _;
use ed25519_dalek::{Keypair, PublicKey, Signature, Signer, Verifier};
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
use std::{
    fmt::{Debug, Display},
    ops::Deref,
    str::FromStr,
};
use thiserror::Error;
// use super::hostname::{Hostname, HostnameParseError};

#[derive(PartialEq, Eq, Clone, Hash)]

@ -18,33 +22,44 @@ pub struct ActorSignature([u8; 64]);

#[derive(Debug, PartialEq, Eq, Clone, Error)]
pub enum ActorIdParseError {
    #[error("incorrect actor id sigil")] IncorrectSigil,
    #[error("invalid base64")] InvalidBase64,
    #[error("incorrect byte count (should be 32)")] IncorrectByteCount,
    #[error("incorrect actor id sigil")]
    IncorrectSigil,
    #[error("invalid base64")]
    InvalidBase64,
    #[error("incorrect byte count (should be 32)")]
    IncorrectByteCount,
}

#[derive(Debug, PartialEq, Eq, Clone, Error)]
pub enum ActorSecretParseError {
    #[error("invalid base64")] InvalidBase64,
    #[error("incorrect byte count (should be 64)")] IncorrectByteCount,
    #[error("invalid base64")]
    InvalidBase64,
    #[error("incorrect byte count (should be 64)")]
    IncorrectByteCount,
}

#[derive(Debug, PartialEq, Eq, Clone, Error)]
pub enum ActorSignatureParseError {
    #[error("invalid base64")] InvalidBase64,
    #[error("incorrect byte count (should be 64)")] IncorrectByteCount,
    #[error("invalid base64")]
    InvalidBase64,
    #[error("incorrect byte count (should be 64)")]
    IncorrectByteCount,
}

impl ActorId {
    pub fn new() -> (ActorId, ActorSecret) {
        let mut rng = rand::thread_rng();
        let keys = Keypair::generate(&mut rng);
        (ActorId(keys.public.to_bytes()), ActorSecret(keys.to_bytes()))
        (
            ActorId(keys.public.to_bytes()),
            ActorSecret(keys.to_bytes()),
        )
    }

    pub fn verify(&self, hash: &[u8], signature: &ActorSignature) -> bool {
        let pubkey = PublicKey::from_bytes(&self.0).expect("can only generate valid keys");
        let signature = Signature::from_bytes(&signature.0).expect("can only generate valid signatures");
        let signature =
            Signature::from_bytes(&signature.0).expect("can only generate valid signatures");
        pubkey.verify(hash, &signature).is_ok()
    }
}

@ -68,8 +83,9 @@ impl ActorSecret {

impl Serialize for ActorSecret {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer {
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            serializer.serialize_str(&b64engine.encode(self.0))
        } else {

@ -80,8 +96,9 @@ impl Serialize for ActorSecret {

impl<'de> Deserialize<'de> for ActorSecret {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'de> {
    where
        D: serde::Deserializer<'de>,
    {
        let bytes = if deserializer.is_human_readable() {
            let string = String::deserialize(deserializer)?;
            b64engine.decode(string).map_err(serde::de::Error::custom)?

@ -95,24 +112,30 @@ impl<'de> Deserialize<'de> for ActorSecret {

impl FromStr for ActorId {
    type Err = ActorIdParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.strip_prefix('%').ok_or(ActorIdParseError::IncorrectSigil)?;
        let key = b64engine.decode(s).map_err(|_| ActorIdParseError::InvalidBase64)?;
        let bytes = key.try_into().map_err(|_| ActorIdParseError::IncorrectByteCount)?;
        let s = s
            .strip_prefix('%')
            .ok_or(ActorIdParseError::IncorrectSigil)?;
        let key = b64engine
            .decode(s)
            .map_err(|_| ActorIdParseError::InvalidBase64)?;
        let bytes = key
            .try_into()
            .map_err(|_| ActorIdParseError::IncorrectByteCount)?;
        Ok(Self(bytes))
    }
}
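
A minimal standalone sketch of the parse path above (strip the `%` sigil, URL-safe base64 decode, check the byte count), assuming the base64 0.21 engine API this file imports; `parse_actor_key` is a hypothetical stand-in for the real `ActorId::from_str`:

use base64::engine::general_purpose::URL_SAFE_NO_PAD as b64engine;
use base64::Engine as _;

// decode "%<base64>" into the raw 32-byte public key
fn parse_actor_key(s: &str) -> Result<[u8; 32], &'static str> {
    let s = s.strip_prefix('%').ok_or("incorrect actor id sigil")?;
    let key = b64engine.decode(s).map_err(|_| "invalid base64")?;
    key.try_into()
        .map_err(|_| "incorrect byte count (should be 32)")
}

fn main() {
    // the id used by this crate's own tests decodes to bytes 0x00..=0x1f
    let key = parse_actor_key("%AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8").unwrap();
    assert_eq!(key[0], 0x00);
    assert_eq!(key[31], 0x1f);
}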

impl TryFrom<&str> for ActorId {
    type Error = ActorIdParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_str(value)
    }
}

impl From<ActorId> for String {
impl From<ActorId> for String {
    fn from(value: ActorId) -> Self {
        value.to_string()
    }

@ -142,41 +165,44 @@ impl Serialize for ActorId {

impl<'de> Deserialize<'de> for ActorId {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'de> {
    where
        D: serde::Deserializer<'de>,
    {
        if deserializer.is_human_readable() {
            struct Visitor;
            impl<'de> serde::de::Visitor<'de> for Visitor {
                type Value = ActorId;

                fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                    formatter.write_str("a valid user id in the form `@ed25519keyinbase64`")
                }

                fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
                    where
                        E: serde::de::Error, {
                where
                    E: serde::de::Error,
                {
                    ActorId::from_str(v).map_err(serde::de::Error::custom)
                }
            }

            deserializer.deserialize_str(Visitor)
        } else {
            struct Visitor;
            impl<'de> serde::de::Visitor<'de> for Visitor {
                type Value = ActorId;

                fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                    formatter.write_str("a valid user id from [u8; 32]")
                }

                fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
                    where
                        E: serde::de::Error, {
                where
                    E: serde::de::Error,
                {
                    Ok(ActorId(v.try_into().map_err(serde::de::Error::custom)?))
                }
            }

            deserializer.deserialize_bytes(Visitor)
        }
    }
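
The two `Visitor` blocks above exist because serde distinguishes human-readable formats (JSON) from binary ones. A minimal sketch of that branching, using a hypothetical `Opaque` newtype instead of the crate's own types:

use serde::{Serialize, Serializer};

struct Opaque([u8; 4]);

impl Serialize for Opaque {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // JSON and friends get a string encoding
            let hex: String = self.0.iter().map(|b| format!("{b:02x}")).collect();
            serializer.serialize_str(&hex)
        } else {
            // binary formats get the raw bytes
            serializer.serialize_bytes(&self.0)
        }
    }
}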

@ -184,17 +210,21 @@ impl<'de> Deserialize<'de> for ActorId {

impl FromStr for ActorSignature {
    type Err = ActorSignatureParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let key = b64engine.decode(s).map_err(|_| ActorSignatureParseError::InvalidBase64)?;
        let bytes = key.try_into().map_err(|_| ActorSignatureParseError::IncorrectByteCount)?;
        let key = b64engine
            .decode(s)
            .map_err(|_| ActorSignatureParseError::InvalidBase64)?;
        let bytes = key
            .try_into()
            .map_err(|_| ActorSignatureParseError::IncorrectByteCount)?;
        Ok(Self(bytes))
    }
}

impl TryFrom<&str> for ActorSignature {
    type Error = ActorSignatureParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_str(value)
    }

@ -221,7 +251,7 @@ impl Debug for ActorSignature {
/*
impl FromStr for UserAlias {
    type Err = UserAliasParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.strip_prefix('@').ok_or(UserAliasParseError::IncorrectSigil)?;
        let (name, server) = s.split_once(':').ok_or(UserAliasParseError::MissingHostname)?;

@ -238,13 +268,13 @@ impl FromStr for UserAlias {

impl TryFrom<&str> for UserAlias {
    type Error = UserAliasParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Self::from_str(value)
    }
}

impl From<UserAlias> for String {
impl From<UserAlias> for String {
    fn from(value: UserAlias) -> Self {
        value.to_string()
    }

@ -261,23 +291,30 @@ impl Display for UserAlias {
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_id_serialize() {
        let user_id = ActorId([
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
            0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d,
            0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
            0x1c, 0x1d, 0x1e, 0x1f,
        ]);
        assert_eq!(user_id.to_string(), "%AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8");
        assert_eq!(
            user_id.to_string(),
            "%AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8"
        );
    }

    #[test]
    fn test_id_deserialize() {
        let target_id = ActorId([
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
            0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d,
            0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
            0x1c, 0x1d, 0x1e, 0x1f,
        ]);
        let user_id: ActorId = "%AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8".parse().expect("should be a user id");
        let user_id: ActorId = "%AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8"
            .parse()
            .expect("should be a user id");
        assert_eq!(user_id, target_id);
    }

@ -325,7 +362,7 @@ mod tests {
    fn test_alias_incorrect_sigil() {
        let user_alias = "#example:hostname.tld"
            .parse::<UserAlias>()
                .expect_err("should be invalid sigil");
            .expect_err("should be invalid sigil");
        assert_eq!(user_alias, UserAliasParseError::IncorrectSigil);
    }

@ -333,7 +370,7 @@ mod tests {
    fn test_alias_missing_hostname() {
        let user_alias = "@example"
            .parse::<UserAlias>()
                .expect_err("should be missing hostname");
            .expect_err("should be missing hostname");
        assert_eq!(user_alias, UserAliasParseError::MissingHostname);
    }

@ -341,7 +378,7 @@ mod tests {
    fn test_alias_invalid_hostname() {
        let user_alias = "@example:"
            .parse::<UserAlias>()
                .expect_err("should have invalid hostname");
            .expect_err("should have invalid hostname");
        assert_eq!(user_alias, UserAliasParseError::InvalidHostname(HostnameParseError::Empty));
    }
@ -1,5 +1,5 @@
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use serde::{Serialize, Deserialize};
// use std::collections::{HashSet, HashMap};
// use serde::{Serialize, Deserialize};
// use serde_json::Value;

@ -34,17 +34,21 @@ pub struct DeriveFile {
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
// whether media-specific derives should be put in this library is... debatable. maybe it will be revised later on.
pub struct DeriveMedia {
    #[serde(skip_serializing_if = "Option::is_none")] pub title: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")] pub artist: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")] pub album: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")] pub comment: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")] pub url: Option<String>,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub artist: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub album: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub comment: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    // maybe add lyrics? location data?
    // this seems to already be adding a lot of implementation details
    // is it worth adding a ton of fields or is `exif: HashMap<String, String>` fine?
    // #[serde(skip_serializing_if = "HashMap::is_empty")] pub other: HashMap<DeriveComment>,

    // unneeded due to `/things/{item_ref}/thumbnail` existing already?
    // #[serde(skip_serializing_if = "Vec::is_empty")] thumbnails: Vec<String>,
}
159 lib/src/event.rs
@ -1,9 +1,9 @@
use std::collections::HashMap;
use serde::{Serialize, Deserialize};
use serde_json::Value;
use crate::item::ItemRef;
use crate::actor::{ActorId, ActorSignature};
use crate::derived::Derived;
use crate::item::ItemRef;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

// TODO (future): stabilize move to the library (ufh)
// TODO (future, maybe?): also come up with a better name than ufh

@ -12,7 +12,11 @@ use crate::derived::Derived;
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Event {
    pub id: ItemRef,
    #[serde(flatten, serialize_with = "serialize_event_content", deserialize_with = "deserialize_event_content")]
    #[serde(
        flatten,
        serialize_with = "serialize_event_content",
        deserialize_with = "deserialize_event_content"
    )]
    pub content: EventContent,
    #[serde(skip_serializing_if = "HashMap::is_empty", default)]
    pub relations: HashMap<ItemRef, RelInfo>,

@ -34,7 +38,11 @@ pub struct Event {
// TODO: this looks like a builder, maybe refactor it into one?
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct WipEvent {
    #[serde(flatten, serialize_with = "serialize_event_content", deserialize_with = "deserialize_event_content")]
    #[serde(
        flatten,
        serialize_with = "serialize_event_content",
        deserialize_with = "deserialize_event_content"
    )]
    pub content: EventContent,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub relations: Option<HashMap<ItemRef, RelInfo>>,

@ -49,14 +57,22 @@ pub struct WipEvent {
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[serde(tag = "type", content = "content")]
pub enum EventContent {
    #[serde(rename = "x.user")] User(UserEvent),
    #[serde(rename = "x.file")] File(FileEvent),
    #[serde(rename = "x.redact")] Redact(RedactEvent),
    #[serde(rename = "x.acl")] Acl(AclEvent),
    #[serde(rename = "x.annotate")] Annotate(AnnotateEvent),
    #[serde(rename = "x.annotate.local")] LocalAnnotate(AnnotateEvent),
    #[serde(rename = "x.tag")] Tag(TagEvent),
    #[serde(rename = "x.tag.local")] LocalTag(TagEvent),
    #[serde(rename = "x.user")]
    User(UserEvent),
    #[serde(rename = "x.file")]
    File(FileEvent),
    #[serde(rename = "x.redact")]
    Redact(RedactEvent),
    #[serde(rename = "x.acl")]
    Acl(AclEvent),
    #[serde(rename = "x.annotate")]
    Annotate(AnnotateEvent),
    #[serde(rename = "x.annotate.local")]
    LocalAnnotate(AnnotateEvent),
    #[serde(rename = "x.tag")]
    Tag(TagEvent),
    #[serde(rename = "x.tag.local")]
    LocalTag(TagEvent),
    #[serde(skip)]
    Other {
        event_type: String,

@ -75,11 +91,15 @@ pub struct RelInfo {
}

// empty events; they have no content, or store content in relations
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct UserEvent {} // TODO: currently unused
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct AnnotateEvent {} // TODO: currently unused
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct RedactEvent {} // uses `redact` relationship
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct UserEvent {} // TODO: currently unused
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct AnnotateEvent {} // TODO: currently unused
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct RedactEvent {} // uses `redact` relationship

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct TagEvent {
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct TagEvent {
    pub tags: Vec<String>,
}

@ -113,8 +133,9 @@ impl EventContent {
}

fn deserialize_event_content<'de, D>(deserializer: D) -> Result<EventContent, D::Error>
    where
        D: serde::Deserializer<'de> {
where
    D: serde::Deserializer<'de>,
{
    #[derive(Debug, Deserialize)]
    #[serde(untagged)]
    enum MaybeKnown {

@ -134,32 +155,47 @@ fn deserialize_event_content<'de, D>(deserializer: D) -> Result<EventContent, D:
    let event = match MaybeKnown::deserialize(deserializer)? {
        MaybeKnown::Known(event) => event,
        MaybeKnown::Unknown { event_type, .. } if matches!(event_type.as_str(),
            "x.user" | "x.file" | "x.redact" | "x.acl" | "x.annotate" | "x.annotate.local"
        ) => return Err(serde::de::Error::custom("invalid content")),
        MaybeKnown::Unknown { event_type, content } => EventContent::Other { event_type, content },
        MaybeKnown::Unknown { event_type, .. }
            if matches!(
                event_type.as_str(),
                "x.user" | "x.file" | "x.redact" | "x.acl" | "x.annotate" | "x.annotate.local"
            ) =>
        {
            return Err(serde::de::Error::custom("invalid content"))
        }
        MaybeKnown::Unknown {
            event_type,
            content,
        } => EventContent::Other {
            event_type,
            content,
        },
        MaybeKnown::Redacted { event_type, .. } => EventContent::Redacted(event_type),
    };
    Ok(event)
}
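
The `MaybeKnown` helper above leans on serde's untagged enums: variants are tried in order, so known `x.*` types deserialize strongly while anything else falls through to a catch-all (the real code additionally rejects malformed payloads that claim a core `x.*` type). A self-contained sketch of the same trick, with simplified stand-in types:

use serde::Deserialize;
use serde_json::Value;
use std::collections::HashMap;

#[derive(Debug, Deserialize)]
#[serde(tag = "type", content = "content")]
enum Known {
    #[serde(rename = "x.tag")]
    Tag { tags: Vec<String> },
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum MaybeKnown {
    Known(Known), // tried first: strongly-typed core events
    Unknown {
        // fallback: any other event type, content kept as raw JSON
        #[serde(rename = "type")]
        event_type: String,
        content: HashMap<String, Value>,
    },
}

fn main() {
    let known: MaybeKnown =
        serde_json::from_str(r#"{"type":"x.tag","content":{"tags":["a"]}}"#).unwrap();
    let other: MaybeKnown =
        serde_json::from_str(r#"{"type":"l.thing","content":{"hello":"world"}}"#).unwrap();
    println!("{known:?} {other:?}");
}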

fn serialize_event_content<S>(content: &EventContent, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer {
where
    S: serde::Serializer,
{
    use serde::ser::SerializeStruct;
    match content {
        EventContent::Other { event_type, content } => {
        EventContent::Other {
            event_type,
            content,
        } => {
            let mut s = serializer.serialize_struct("EventContent", 2)?;
            s.serialize_field("type", event_type)?;
            s.serialize_field("content", content)?;
            s.end()
        },
        }
        EventContent::Redacted(event_type) => {
            let mut s = serializer.serialize_struct("EventContent", 2)?;
            s.serialize_field("type", event_type)?;
            s.serialize_field("content", &None as &Option<()>)?;
            s.end()
        },
        }
        _ => content.serialize(serializer),
    }
}

@ -185,7 +221,7 @@ impl WipEvent {
                .as_millis() as u64,
        }
    }

    pub fn to_json(&self) -> String {
        json_canon::to_string(&self).expect("can always be serialized")
    }

@ -195,10 +231,13 @@ impl WipEvent {
            return false;
        };

        let sigless = WipEvent { signature: None, ..self.clone() };
        let sigless = WipEvent {
            signature: None,
            ..self.clone()
        };
        self.sender.verify(sigless.to_json().as_bytes(), signature)
    }

    // pub fn into_event(self) -> Option<Event> {
    //     Some(Event {
    //         id: None,

@ -209,16 +248,16 @@ impl WipEvent {
    //         signature: self.signature?,
    //         origin_ts: self.origin_ts,
    //     })
    // }
    // }
}

#[cfg(test)]
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;
    use crate::{item::HashType, event::EventContent, derived::Derived, actor::ActorSecret};
    use crate::{actor::ActorSecret, derived::Derived, event::EventContent, item::HashType};
    use serde_json::json;
    use std::collections::HashMap;

    // TODO: find a way to deduplicate this code a bit?

@ -226,7 +265,10 @@ mod tests {

    fn create_event(content: EventContent) -> Event {
        let secret: ActorSecret = serde_json::from_str(SECRET).unwrap();
        let item_ref = ItemRef(HashType::Sha224, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_string());
        let item_ref = ItemRef(
            HashType::Sha224,
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_string(),
        );
        let garbage = [0; 10];
        let id = secret.get_id();
        Event {

@ -239,32 +281,50 @@ mod tests {
            origin_ts: 0,
        }
    }

    #[test]
    fn test_serialize_file() {
        let event = create_event(EventContent::File(FileEvent { chunks: Vec::new(), name: Some("file".into()) }));
        let event = create_event(EventContent::File(FileEvent {
            chunks: Vec::new(),
            name: Some("file".into()),
        }));
        let string = json_canon::to_string(&event).unwrap();
        assert_eq!(string, r#"{"content":{"chunks":[],"name":"file"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#);
        assert_eq!(
            string,
            r#"{"content":{"chunks":[],"name":"file"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#
        );
    }

    #[test]
    fn test_serialize_generic() {
        let event = create_event(EventContent::Other { event_type: "l.thing".into(), content: HashMap::from([("hello".into(), json!("world"))]) });
        let event = create_event(EventContent::Other {
            event_type: "l.thing".into(),
            content: HashMap::from([("hello".into(), json!("world"))]),
        });
        let string = json_canon::to_string(&event).unwrap();
        assert_eq!(string, r#"{"content":{"hello":"world"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"l.thing"}"#);
        assert_eq!(
            string,
            r#"{"content":{"hello":"world"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"l.thing"}"#
        );
    }

    #[test]
    fn test_serialize_redacted() {
        let event = create_event(EventContent::Redacted("x.file".into()));
        let string = json_canon::to_string(&event).unwrap();
        assert_eq!(string, r#"{"content":null,"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#);
        assert_eq!(
            string,
            r#"{"content":null,"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#
        );
    }

    #[test]
    fn test_deserialize_file() {
        let string = r#"{"content":{"chunks":[],"name":"file"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#;
        let good_event = create_event(EventContent::File(FileEvent { chunks: Vec::new(), name: Some("file".into()) }));
        let good_event = create_event(EventContent::File(FileEvent {
            chunks: Vec::new(),
            name: Some("file".into()),
        }));
        let event: Event = serde_json::from_str(&string).unwrap();
        assert_eq!(event, good_event);
    }

@ -272,11 +332,14 @@ mod tests {
    #[test]
    fn test_deserialize_generic() {
        let string = r#"{"content":{"hello":"world"},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"l.thing"}"#;
        let good_event = create_event(EventContent::Other { event_type: "l.thing".into(), content: HashMap::from([("hello".into(), json!("world"))]) });
        let good_event = create_event(EventContent::Other {
            event_type: "l.thing".into(),
            content: HashMap::from([("hello".into(), json!("world"))]),
        });
        let event: Event = serde_json::from_str(&string).unwrap();
        assert_eq!(event, good_event);
    }

    #[test]
    fn test_deserialize_redacted() {
        let string = r#"{"content":null,"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#;

@ -284,7 +347,7 @@ mod tests {
        let event: Event = serde_json::from_str(&string).unwrap();
        assert_eq!(event, good_event);
    }

    #[test]
    fn test_deserialize_invalid() {
        let string = r#"{"content":{},"id":"sha224-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","origin_ts":0,"sender":"%D1qUbQbgC0IDWdyw6GtiexfKHKaGH_yWox_d7dkJgOI","signature":"Xdpi-WfSUVnV0tbY6OBaphT_ZTPtN_dnHdaOjoqcNJJ9gU3Fh0R35xur6OAD_zMRA6N-KpZCfrsaXl7yznd2BQ","type":"x.file"}"#;
@ -1,5 +1,5 @@
use serde::{Deserialize, Serialize};
use std::{fmt::Display, str::FromStr};
use serde::{Serialize, Deserialize};
use thiserror::Error;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]

@ -10,12 +10,18 @@ pub enum HashType {

#[derive(Debug, Error)]
pub enum ItemRefParseError {
    #[error("missing hash type")] MissingHashType,
    #[error("unknown hash type")] UnknownHashType,
    #[error("missing hash")] MissingHash,
    #[error("invalid hash char")] InvalidHashChar,
    #[error("invalid hash length")] InvalidHashLength,
    #[error("unknown ref kind")] ExtraData,
    #[error("missing hash type")]
    MissingHashType,
    #[error("unknown hash type")]
    UnknownHashType,
    #[error("missing hash")]
    MissingHash,
    #[error("invalid hash char")]
    InvalidHashChar,
    #[error("invalid hash length")]
    InvalidHashLength,
    #[error("unknown ref kind")]
    ExtraData,
}
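
For reference, refs have the shape `<hashtype>-<hexdigest>` (e.g. `sha224-aaaa…`), and each failure mode above maps onto one enum variant. A rough sketch of the checks, using plain `&'static str` errors instead of the enum (the real code additionally validates length per hash type):

fn split_ref(value: &str) -> Result<(&str, &str), &'static str> {
    let mut split = value.split('-');
    let hash_type = split.next().ok_or("missing hash type")?;
    let hash = split.next().ok_or("missing hash")?;
    if split.next().is_some() {
        return Err("extra data"); // more than one '-' separator
    }
    if !hash.chars().all(|c| c.is_ascii_hexdigit()) {
        return Err("invalid hash char");
    }
    Ok((hash_type, hash))
}

fn main() {
    assert!(split_ref("sha224-00ff").is_ok());
    assert!(split_ref("sha224").is_err()); // no hash part
}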

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]

@ -29,12 +35,12 @@ impl ItemRef {
            .map(|i| u8::from_str_radix(&self.1[i..i + 2], 16))
            .collect::<Result<_, _>>()
            .unwrap()
    }
}
}

impl TryFrom<&str> for ItemRef {
    type Error = ItemRefParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let mut split = value.split('-');
        let hash_type_str = split.next().ok_or(ItemRefParseError::MissingHashType)?;

@ -67,7 +73,7 @@ impl From<ItemRef> for String {

impl FromStr for ItemRef {
    type Err = ItemRefParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        ItemRef::try_from(s)
    }

@ -92,7 +98,7 @@ impl HashType {
            Self::Sha224 => 28,
        }
    }

    /// get length of this kind of hash in hexadecimal
    fn get_hex_len(&self) -> u32 {
        self.get_len() * 2
@ -1,9 +1,9 @@
pub mod item;
pub mod actor;
pub mod event;
pub mod derived;
pub mod query;
pub mod acl;
pub mod actor;
pub mod derived;
pub mod event;
pub mod item;
pub mod query;

// TODO: use criterion or something for benchmarking
@ -1,9 +1,9 @@
use std::collections::{HashSet, HashMap};
use std::collections::{HashMap, HashSet};

use serde::{Serialize, Deserialize};
use crate::item::ItemRef;
use crate::actor::ActorId;
use crate::event::{Event, RelInfo, EventContent};
use crate::event::{Event, EventContent, RelInfo};
use crate::item::ItemRef;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Query {

@ -41,26 +41,38 @@ pub enum MatchType {
}

impl Query {
    pub fn matches_relationless(&self, event: &Event) -> bool {
    pub fn matches_relationless(&self, event: &Event) -> bool {
        let bad_ref = self.refs.as_ref().is_some_and(|s| !s.contains(&event.id));
        let bad_sender = self.senders.as_ref().is_some_and(|s| !s.contains(&event.sender));
        let bad_type = self.types.as_ref().is_some_and(|s| !s.contains(event.content.get_type()));
        let bad_tags = self.tags.as_ref().is_some_and(|s| s.is_disjoint(&event.derived.tags));
        let bad_sender = self
            .senders
            .as_ref()
            .is_some_and(|s| !s.contains(&event.sender));
        let bad_type = self
            .types
            .as_ref()
            .is_some_and(|s| !s.contains(event.content.get_type()));
        let bad_tags = self
            .tags
            .as_ref()
            .is_some_and(|s| s.is_disjoint(&event.derived.tags));
        let bad_redact = !self.with_redacts && matches!(event.content, EventContent::Redacted(_));

        !(bad_ref || bad_sender || bad_type || bad_tags || bad_redact)
    }

    pub fn matches_relation(&self, source: &Event, target: &Event, rel_info: &RelInfo) -> bool {
        for (rel_type, source_type) in &self.relations {
            if &rel_info.rel_type == rel_type && source.content.get_type() == source_type && self.matches_relationless(target) {
            if &rel_info.rel_type == rel_type
                && source.content.get_type() == source_type
                && self.matches_relationless(target)
            {
                return true;
            }
        }

        false
    }

    pub fn matches(&self, event: &Event, relations: &Relations) -> MatchType {
        if self.matches_relationless(event) {
            return MatchType::Event;

@ -71,7 +83,7 @@ impl Query {
                return MatchType::Relation;
            }
        }

        MatchType::None
    }
}
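
All four `bad_*` filters above share one idiom: an unset (`None`) filter matches everything, while a set filter must contain the candidate. A tiny self-contained sketch of that `is_some_and` pattern:

use std::collections::HashSet;

// returns true when the value is actively filtered out
fn rejected_by(filter: Option<&HashSet<String>>, value: &str) -> bool {
    filter.is_some_and(|s| !s.contains(value))
}

fn main() {
    let set: HashSet<String> = HashSet::from(["x.file".to_string()]);
    assert!(!rejected_by(None, "x.tag")); // no filter: everything passes
    assert!(!rejected_by(Some(&set), "x.file")); // in the set: passes
    assert!(rejected_by(Some(&set), "x.tag")); // filtered out
}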

@ -82,7 +94,11 @@ mod tests {
    use std::collections::HashMap;

    use super::*;
    use crate::{item::HashType, event::{EventContent, FileEvent}, derived::Derived};
    use crate::{
        derived::Derived,
        event::{EventContent, FileEvent},
        item::HashType,
    };

    fn create_event(content: EventContent) -> Event {
        let garbage = [0; 10];
@ -1,9 +1,12 @@
mod peer;

use std::{net::{SocketAddr, SocketAddrV4, Ipv4Addr}, sync::Arc};
use std::{
    net::{Ipv4Addr, SocketAddr, SocketAddrV4},
    sync::Arc,
};

use axum::{Router, Server, extract::State, Json};
use peer::{Node, NodeId, RPCRequest, Contact, RPCResponse};
use axum::{extract::State, Json, Router, Server};
use peer::{Contact, Node, NodeId, RPCRequest, RPCResponse};
use tokio::sync::Mutex;

struct NodeState {

@ -18,9 +21,14 @@ struct Request {

#[tokio::main]
async fn main() {
    let port: u16 = std::env::args().nth(1).and_then(|s| s.parse().ok()).unwrap();
    let port: u16 = std::env::args()
        .nth(1)
        .and_then(|s| s.parse().ok())
        .unwrap();
    let node = Node::new(NodeId::new(), port);
    let state = Arc::new(NodeState { node: Mutex::new(node) });
    let state = Arc::new(NodeState {
        node: Mutex::new(node),
    });
    let router = Router::new()
        .route("/send", axum::routing::post(handle))
        .with_state(state.clone());

@ -36,23 +44,27 @@ async fn main() {
            "ping" => println!("pong"),
            "info" => println!("{}", serde_json::to_string(&node.contact).unwrap()),
            "bootstrap" => {
                node.bootstrap(serde_json::from_str(parts[1]).unwrap()).await;
                node.bootstrap(serde_json::from_str(parts[1]).unwrap())
                    .await;
                println!("added bootstrap node");
            },
            }
            "set" => {
                node.set(&NodeId::new_from_str(parts[1]), parts[2]).await;
                println!("set");
            },
            }
            "get" => {
                let result = node.get(&NodeId::new_from_str(parts[1])).await;
                println!("get: {:?}", result);
            },
            }
            _ => println!("not a command"),
        }
    }
}

async fn handle(State(state): State<Arc<NodeState>>, Json(request): Json<Request>) -> Json<RPCResponse> {
async fn handle(
    State(state): State<Arc<NodeState>>,
    Json(request): Json<Request>,
) -> Json<RPCResponse> {
    // println!("handle request");
    let mut node = state.node.lock().await;
    let response = node.receive(&request.contact, request.info);
@ -2,9 +2,9 @@

// #![allow(unused)] // TODO (commit): remove this before committing

use std::{collections::{HashMap, HashSet}};
use serde::{Deserialize, Serialize};
use sha2::Digest;
use serde::{Serialize, Deserialize};
use std::collections::{HashMap, HashSet};

/// the length of each key
const KEY_LEN: usize = 20;

@ -98,10 +98,13 @@ impl Distance {
    }
}

impl Contact {
    // TODO (future): i really should split apart network logic
    async fn send(&self, sender: &Self, message: RPCRequest) -> Result<RPCResponse, reqwest::Error> {
    async fn send(
        &self,
        sender: &Self,
        message: RPCRequest,
    ) -> Result<RPCResponse, reqwest::Error> {
        #[derive(Debug, Serialize)]
        struct Request<'a> {
            info: RPCRequest,

@ -112,9 +115,9 @@ impl Contact {
            info: message,
            contact: sender,
        };

        dbg!(format!("http://{}/send", self.host));

        reqwest::Client::new()
            .post(format!("http://{}/send", self.host))
            .json(&request)

@ -194,7 +197,7 @@ impl Node {
        if let Ok(res) = contact.send(&self.contact, message).await {
            return Some(res);
        }

        // node is dead
        self.router.remove(&contact.id);
        None

@ -226,7 +229,8 @@ impl Node {
    pub async fn set(&mut self, key: &NodeId, value: &str) {
        let contacts = self.router.find_closest(key, 1);
        for contact in contacts {
            self.send(&contact, RPCRequest::Store(*key, value.to_owned())).await;
            self.send(&contact, RPCRequest::Store(*key, value.to_owned()))
                .await;
        }
        self.store.insert(*key, value.to_owned());
    }
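
peer.rs is a Kademlia-style node, so `find_closest` presumably orders contacts by XOR distance over the 20-byte `KEY_LEN` ids. A self-contained sketch of that metric, assuming the usual Kademlia definition (the crate's own `Distance` type may differ in detail):

const KEY_LEN: usize = 20;

// XOR distance: bytewise xor of the two ids, compared lexicographically
fn distance(a: &[u8; KEY_LEN], b: &[u8; KEY_LEN]) -> [u8; KEY_LEN] {
    let mut out = [0u8; KEY_LEN];
    for i in 0..KEY_LEN {
        out[i] = a[i] ^ b[i];
    }
    out
}

fn main() {
    let a = [0u8; KEY_LEN];
    let mut b = [0u8; KEY_LEN];
    b[0] = 0b1000_0000; // differs in the highest bit: lands in the farthest bucket
    assert_eq!(distance(&a, &b)[0], 0b1000_0000);
    assert_eq!(distance(&a, &a), [0u8; KEY_LEN]); // distance to self is zero
}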

@ -1,10 +1,13 @@
use axum::http::StatusCode;
use sha2::Digest;
use tracing::debug;
use std::ops::Deref;
use ufh::{item::{ItemRef, HashType}, event::WipEvent};
use bytes::Bytes;
use crate::Error;
use axum::http::StatusCode;
use bytes::Bytes;
use sha2::Digest;
use std::ops::Deref;
use tracing::debug;
use ufh::{
    event::WipEvent,
    item::{HashType, ItemRef},
};

// TODO (future): multiple backends/clients
// -> for this, i need to find out how to split blobs (every backend

@ -30,10 +33,12 @@ impl Item {
        match buffer[0] {
            0x00 => Ok(Item::Raw(buffer.slice(1..))),
            0x01 => Ok(Item::WipEvent(serde_json::from_slice(&buffer[1..])?)),
            _ => Err(Error::Validation("all stored content (at this commit) should either be a blob or event")),
            _ => Err(Error::Validation(
                "all stored content (at this commit) should either be a blob or event",
            )),
        }
    }

    pub fn to_bytes(&self) -> Bytes {
        let buffer = match self {
            Item::WipEvent(event) => [&[0x01], event.to_json().as_bytes()].concat(),

@ -57,11 +62,12 @@ impl Client {
            base_url: base_url.to_owned(),
        }
    }

    pub async fn put(&self, item: Item) -> Result<ItemRef, Error> {
        debug!("put blob");
        let bytes = item.to_bytes();
        let res = self.http
        let res = self
            .http
            .post(format!("{}/blobs", self.base_url))
            .header("content-length", bytes.len())
            .body(bytes.clone())

@ -73,10 +79,11 @@ impl Client {
        let res: UploadResponse = serde_json::from_str(&res)?;
        Ok(res.item_ref)
    }

    pub async fn get(&self, item_ref: &ItemRef) -> Result<Item, Error> {
        debug!("get blob");
        let req = self.http
        let req = self
            .http
            .get(format!("{}/blobs/{}", self.base_url, item_ref))
            .send()
            .await?

@ -90,13 +97,15 @@ impl Client {
        debug!("get blob through server {}", via);
        let url = format!("https://changeme/things/{}", item_ref);
        let mut url = reqwest::Url::parse(&url).expect("invalid url?");
        url.set_host(Some(via)).map_err(|_| Error::Validation("invalid hostname"))?;

        url.set_host(Some(via))
            .map_err(|_| Error::Validation("invalid hostname"))?;

        let req = match self.http.get(url).send().await? {
            req if req.status() == StatusCode::GONE => req,
            req => req.error_for_status()?,
        };
        let header = req.headers()
        let header = req
            .headers()
            .get("content-type")
            .and_then(|h| h.to_str().ok())
            .map(|h| Ok(h.to_owned()))

@ -107,7 +116,7 @@ impl Client {
            "application/octet-stream" => Item::Raw(bytes),
            _ => return Err(Error::Validation("couldn't fetch item")),
        };

        let hash: Vec<u8> = {
            let mut hasher = sha2::Sha224::default();
            hasher.update(item.to_bytes());

@ -119,7 +128,7 @@ impl Client {
        if !valid_hash {
            return Err(Error::Validation("server gave an invalid hash"));
        }

        Ok(item)
    }
}
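
The tail of the fetch path above re-hashes the fetched bytes so an untrusted remote server can't forge content for a ref. A minimal sketch of that integrity check with the same sha2 crate:

use sha2::{Digest, Sha224};

// hash the fetched bytes and compare against the ref's hex digest
fn verify(bytes: &[u8], expected_hex: &str) -> bool {
    let hash = Sha224::digest(bytes);
    let hex: String = hash.iter().map(|b| format!("{b:02x}")).collect();
    hex == expected_hex
}

fn main() {
    // sha224 of the empty string, as a known test vector
    assert!(verify(
        b"",
        "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f"
    ));
}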
@ -1,7 +1,11 @@
use clap::{Parser, Subcommand};

#[derive(Debug, Parser)]
#[command(name = "ufh", version = "0.1.0", about = "cli to interact with a ufh server")]
#[command(
    name = "ufh",
    version = "0.1.0",
    about = "cli to interact with a ufh server"
)]
pub struct Command {
    #[command(subcommand)]
    pub action: Action,

@ -11,7 +15,7 @@ pub struct Command {
pub enum Action {
    /// start a server
    Serve {
        #[arg(short, long, help = "which port to listen on", default_value="3210")]
        #[arg(short, long, help = "which port to listen on", default_value = "3210")]
        port: u16,
    }
    },
}
@ -1,6 +1,6 @@
use tokio::process::Command;
use std::process::Stdio;
use std::io::Cursor;
use std::process::Stdio;
use tokio::process::Command;

// FIXME: some files (mp4, mov) may fail to thumbnail with stdin
// they can have a MOOV atom at the end, and ffmpeg can't seek to the beginning

@ -13,9 +13,7 @@ pub async fn extract(buffer: &[u8], args: &[&str]) -> Result<Vec<u8>, ()> {
        .spawn()
        .expect("couldn't find ffmpeg");

    let mut cmd_stdin = cmd.stdin
        .take()
        .expect("ffmpeg should take stdin");
    let mut cmd_stdin = cmd.stdin.take().expect("ffmpeg should take stdin");

    let mut cursor = Cursor::new(&buffer);
    let _ = tokio::io::copy(&mut cursor, &mut cmd_stdin).await;
@ -1,10 +1,10 @@
use serde::{Serialize, Deserialize};
use super::ffmpeg;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::Cursor;
use std::process::Stdio;
use tokio::process::Command;
use ufh::derived::DeriveMedia;
use std::collections::HashMap;
use std::process::Stdio;
use std::io::Cursor;
use super::ffmpeg;

#[derive(Debug, Serialize, Deserialize)]
pub struct Ffprobe {

@ -38,21 +38,32 @@ pub struct Disposition {
}

#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Tags(#[serde(deserialize_with = "serde_aux::container_attributes::deserialize_struct_case_insensitive")] HashMap<String, String>);
pub struct Tags(
    #[serde(
        deserialize_with = "serde_aux::container_attributes::deserialize_struct_case_insensitive"
    )]
    HashMap<String, String>,
);
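
ffprobe reports container tags with whatever casing the muxer used (`TITLE`, `Title`, `title`), which is why `Tags` above opts into case-insensitive deserialization via serde_aux. A manual sketch of the same normalization, without the serde hook:

use std::collections::HashMap;

// lowercase every key so later lookups don't care about muxer casing
fn normalize(tags: HashMap<String, String>) -> HashMap<String, String> {
    tags.into_iter()
        .map(|(k, v)| (k.to_ascii_lowercase(), v))
        .collect()
}

fn main() {
    let raw = HashMap::from([("TITLE".to_string(), "song".to_string())]);
    assert_eq!(normalize(raw)["title"], "song");
}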

impl Ffprobe {
    pub async fn derive(buffer: &[u8]) -> Ffprobe {
        let mut cmd = Command::new("/usr/bin/ffprobe")
            .args(["-v", "quiet", "-of", "json", "-show_streams", "-show_format", "-"])
            .args([
                "-v",
                "quiet",
                "-of",
                "json",
                "-show_streams",
                "-show_format",
                "-",
            ])
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit())
            .spawn()
            .expect("couldn't find ffprobe");

        let mut cmd_stdin = cmd.stdin
            .take()
            .expect("ffprobe should take stdin");

        let mut cmd_stdin = cmd.stdin.take().expect("ffprobe should take stdin");

        let mut cursor = Cursor::new(&buffer);
        let _ = tokio::io::copy(&mut cursor, &mut cmd_stdin).await;

@ -77,9 +88,13 @@ impl Ffprobe {
        }
        info
    }

    pub fn dimensions(&self) -> (Option<u64>, Option<u64>, Option<u64>) {
        let mut dimensions = (None, None, self.format.duration.clone().and_then(|s| s.parse().ok()));
        let mut dimensions = (
            None,
            None,
            self.format.duration.clone().and_then(|s| s.parse().ok()),
        );
        for stream in &self.streams {
            let (width, height, duration) = stream.dimensions();
            dimensions.0 = dimensions.0.or(width);

@ -88,24 +103,47 @@ impl Ffprobe {
        }
        dimensions
    }

    pub async fn thumbnail(&self, buffer: &[u8]) -> Option<Vec<u8>> {
        println!("generate thumbnail");
        match self.streams.iter().find(|s| s.disposition.attached_pic == 1) {
            Some(stream) => ffmpeg::extract(buffer, &["-map", &format!("0:{}", stream.index), "-f", "webp", "-"]).await.ok(),
            None => ffmpeg::extract(buffer, &["-vframes", "1", "-f", "webp", "-"]).await.ok(),
        match self
            .streams
            .iter()
            .find(|s| s.disposition.attached_pic == 1)
        {
            Some(stream) => ffmpeg::extract(
                buffer,
                &["-map", &format!("0:{}", stream.index), "-f", "webp", "-"],
            )
            .await
            .ok(),
            None => ffmpeg::extract(buffer, &["-vframes", "1", "-f", "webp", "-"])
                .await
                .ok(),
        }
    }

    pub async fn subtitles(&self, buffer: &[u8]) -> Option<String> {
        let stream = self.streams.iter().find(|s| s.codec_type == "subtitle")?;
        String::from_utf8(ffmpeg::extract(buffer, &["-map", &format!("0:{}", stream.index), "-f", "webvtt", "-"]).await.ok()?).ok()
        String::from_utf8(
            ffmpeg::extract(
                buffer,
                &["-map", &format!("0:{}", stream.index), "-f", "webvtt", "-"],
            )
            .await
            .ok()?,
        )
        .ok()
    }
}

impl Stream {
    pub fn dimensions(&self) -> (Option<u64>, Option<u64>, Option<u64>) {
        (self.width, self.height, self.duration.clone().and_then(|s| s.parse().ok()))
        (
            self.width,
            self.height,
            self.duration.clone().and_then(|s| s.parse().ok()),
        )
    }
}
@ -1,21 +1,31 @@
use axum::response::IntoResponse;
use axum::http::StatusCode;
use axum::extract::Json;
use axum::http::StatusCode;
use axum::response::IntoResponse;
use serde_json::json;
use tracing::error;

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("{0}")] Reqwest(reqwest::Error),
    #[error("{0}")] Json(serde_json::Error),
    #[error("{0}")] Database(sqlx::Error),
    #[error("not found")] NotFound,
    #[error("{0}")] Image(image::ImageError),
    #[error("{1}")] Http(StatusCode, String),
    #[error("{0}")] Validation(&'static str),
    #[error("{0}")] ItemRefParse(ufh::item::ItemRefParseError),
    #[error("{0}")] RecvError(tokio::sync::broadcast::error::RecvError),
    #[error("{0}")] Tantivy(tantivy::error::TantivyError),
    #[error("{0}")]
    Reqwest(reqwest::Error),
    #[error("{0}")]
    Json(serde_json::Error),
    #[error("{0}")]
    Database(sqlx::Error),
    #[error("not found")]
    NotFound,
    #[error("{0}")]
    Image(image::ImageError),
    #[error("{1}")]
    Http(StatusCode, String),
    #[error("{0}")]
    Validation(&'static str),
    #[error("{0}")]
    ItemRefParse(ufh::item::ItemRefParseError),
    #[error("{0}")]
    RecvError(tokio::sync::broadcast::error::RecvError),
    #[error("{0}")]
    Tantivy(tantivy::error::TantivyError),
    // #[error("{0}")] Unknown(Box<dyn std::error::Error>),
}

@ -39,7 +49,7 @@ impl From<sqlx::Error> for Error {
    fn from(err: sqlx::Error) -> Self {
        match err {
            sqlx::Error::RowNotFound => Error::NotFound,
            err => Error::Database(err)
            err => Error::Database(err),
        }
    }
}

@ -90,13 +100,11 @@ impl Error {
                } else {
                    StatusCode::INTERNAL_SERVER_ERROR
                }
            },
            }
            Error::Json(_) => StatusCode::BAD_REQUEST,
            Error::Database(ref err) => {
                match err {
                    sqlx::Error::RowNotFound => unreachable!(),
                    _ => StatusCode::INTERNAL_SERVER_ERROR,
                }
            Error::Database(ref err) => match err {
                sqlx::Error::RowNotFound => unreachable!(),
                _ => StatusCode::INTERNAL_SERVER_ERROR,
            },
            Error::NotFound => StatusCode::NOT_FOUND,
            Error::Image(_) => StatusCode::UNSUPPORTED_MEDIA_TYPE,
@ -1,9 +1,20 @@
use std::collections::{HashSet, HashMap};
use bytes::Bytes;
use tracing::debug;
use ufh::{derived::Derived, event::{EventContent, Event, RelInfo, FileEvent}, item::ItemRef};
use crate::{state::{db::{DbItem, sqlite::Sqlite, Database}, search::{Search, Document}}, routes::{things::Error, util::get_blob}, derive::Deriver};
use super::Items;
use crate::{
    derive::Deriver,
    routes::{things::Error, util::get_blob},
    state::{
        db::{sqlite::Sqlite, Database, DbItem},
        search::{Document, Search},
    },
};
use bytes::Bytes;
use std::collections::{HashMap, HashSet};
use tracing::debug;
use ufh::{
    derived::Derived,
    event::{Event, EventContent, FileEvent, RelInfo},
    item::ItemRef,
};

type Relations = HashMap<ItemRef, (Event, RelInfo)>;

@ -11,12 +22,15 @@ pub async fn get_relations(db: &Sqlite, event: &Event) -> Result<Relations, Erro
    // get/validate relations
    let rel_ids: Vec<_> = event.relations.keys().cloned().collect();
    let rel_events = db.bulk_fetch(&rel_ids, false).await?;
    rel_events.into_iter()
    rel_events
        .into_iter()
        .map(|(item_ref, item)| {
            let rel_info = event.relations.get(&item_ref).unwrap();
            match item {
                DbItem::Event(event) => Ok((item_ref, (*event, rel_info.clone()))),
                DbItem::Blob => Err(Error::Validation("some relations are to blobs instead of events")),
                DbItem::Blob => Err(Error::Validation(
                    "some relations are to blobs instead of events",
                )),
            }
        })
        .collect()

@ -29,17 +43,23 @@ pub enum DelayedAction {
    None,
}

pub async fn prepare_special(me: &Items, event: &Event, relations: &Relations) -> Result<DelayedAction, Error> {
pub async fn prepare_special(
    me: &Items,
    event: &Event,
    relations: &Relations,
) -> Result<DelayedAction, Error> {
    match &event.content {
        EventContent::File(f) => {
            match me.db.bulk_fetch(&f.chunks, false).await {
                Ok(items) if items.iter().all(|i| matches!(i, (_, DbItem::Blob))) => Ok(()),
                Ok(_) => Err(Error::Validation("one or more chunk items isn't a blob")),
                Err(Error::NotFound) => Err(Error::Validation("one or more chunk items doesn't exist")),
                Err(Error::NotFound) => {
                    Err(Error::Validation("one or more chunk items doesn't exist"))
                }
                Err(err) => Err(err),
            }?;
            debug!("validated file");
        },
        }
        EventContent::Redact(_) => {
            if event.relations.is_empty() {
                return Err(Error::Validation("missing any relations"))?; // soft error

@ -49,27 +69,29 @@ pub async fn prepare_special(me: &Items, event: &Event, relations: &Relations) -
            if rel_info.key.is_some() {
                return Err(Error::Validation("redaction relation cannot have key"));
            }

            if rel_info.rel_type != "redact" {
                return Err(Error::Validation("redaction relation must be \"redact\""));
            }

            if event.sender != rel_event.sender {
                return Err(Error::Validation("you currently cannot redact someone else's event"));
                return Err(Error::Validation(
                    "you currently cannot redact someone else's event",
                ));
            }

            debug!("validated redaction");

            refs.push(rel_ref.clone());
        }

        return Ok(DelayedAction::Redact(refs));
        },
        }
        EventContent::LocalTag(content) => {
            if event.relations.is_empty() {
                return Err(Error::Validation("missing relations")); // soft fail
            };

            let mut targets = Vec::with_capacity(event.relations.len());
            for (item_ref, (_, rel_info)) in relations {
                if rel_info.rel_type != "tag" || rel_info.key.is_some() {

@ -79,13 +101,16 @@ pub async fn prepare_special(me: &Items, event: &Event, relations: &Relations) -
            }

            return Ok(DelayedAction::Tag(targets, content.tags.clone()));
        },
        }
        EventContent::Other { event_type, .. } => {
            if event_type.starts_with("x.") {
                return Err(Error::Validation("unknown core event"));
            }
        },
        _ => todo!("handler for event type {} not implemented (yet)", event.content.get_type()),
        }
        _ => todo!(
            "handler for event type {} not implemented (yet)",
            event.content.get_type()
        ),
    };

    Ok(DelayedAction::None)

@ -103,7 +128,7 @@ pub async fn commit_special(me: &Items, action: &DelayedAction) -> Result<(), Er
            me.search.delete(item_ref).await?;
            drop(lock);
        }
        },
        }
        DelayedAction::Tag(refs, tags) => {
            // TODO (performance): batch transactions
            for item_ref in refs {

@ -112,14 +137,18 @@ pub async fn commit_special(me: &Items, action: &DelayedAction) -> Result<(), Er
                me.db.tags_set(&[item_ref.clone()], tags).await?;
                drop(lock);
            }
        },
        }
        DelayedAction::None => (),
    };

    Ok(())
}
|
||||
|
||||
pub async fn derive(me: &Items, event: &Event, file: &FileEvent) -> Result<(Derived, Option<Bytes>), Error> {
|
||||
pub async fn derive(
|
||||
me: &Items,
|
||||
event: &Event,
|
||||
file: &FileEvent,
|
||||
) -> Result<(Derived, Option<Bytes>), Error> {
|
||||
let bytes = get_blob(me, event, None).await?;
|
||||
let deriver = Deriver::begin(bytes, file.name.as_deref()).await;
|
||||
let derived = Derived {
|
||||
|
@ -130,14 +159,22 @@ pub async fn derive(me: &Items, event: &Event, file: &FileEvent) -> Result<(Deri
|
|||
Ok((derived, deriver.get_thumbnail().await))
|
||||
}
|
||||
|
||||
pub async fn update_search_index(me: &Items, event: &Event, relations: &Relations) -> Result<(), Error> {
|
||||
pub async fn update_search_index(
|
||||
me: &Items,
|
||||
event: &Event,
|
||||
relations: &Relations,
|
||||
) -> Result<(), Error> {
|
||||
match event.content {
|
||||
EventContent::Redact(_) => for rel in relations.keys() {
|
||||
me.search.delete(rel).await?;
|
||||
},
|
||||
EventContent::LocalTag(_) | EventContent::Tag(_) => for (rel, _) in relations.values() {
|
||||
reindex(me, rel).await?;
|
||||
},
|
||||
EventContent::Redact(_) => {
|
||||
for rel in relations.keys() {
|
||||
me.search.delete(rel).await?;
|
||||
}
|
||||
}
|
||||
EventContent::LocalTag(_) | EventContent::Tag(_) => {
|
||||
for (rel, _) in relations.values() {
|
||||
reindex(me, rel).await?;
|
||||
}
|
||||
}
|
||||
_ => reindex(me, event).await?,
|
||||
};
|
||||
Ok(())
|
||||
|
@ -145,7 +182,10 @@ pub async fn update_search_index(me: &Items, event: &Event, relations: &Relation
|
|||
|
||||
pub async fn reindex(me: &Items, event: &Event) -> Result<(), Error> {
|
||||
let bytes = get_blob(me, event, None).await?;
|
||||
let file = match &event.content { EventContent::File(f) => Some(f), _ => None };
|
||||
let file = match &event.content {
|
||||
EventContent::File(f) => Some(f),
|
||||
_ => None,
|
||||
};
|
||||
let derived = Deriver::begin(bytes, None).await.get_text().await;
|
||||
let doc = Document {
|
||||
text: derived.unwrap_or_default(),
|
||||
|
|
|
@ -1,12 +1,25 @@
use std::{num::NonZeroUsize, collections::HashMap, sync::Arc};
use crate::{
blobs,
items::events::update_search_index,
routes::things::{thumbnail::ThumbnailSize, Error},
state::{
db::{sqlite::Sqlite, Database},
search::tantivy::Tantivy,
},
Relations,
};
use bytes::Bytes;
use events::DelayedAction;
use lru::LruCache;
use once_cell::sync::OnceCell;
use std::{collections::HashMap, num::NonZeroUsize, sync::Arc};
use tokio::sync::Mutex;
use tracing::{debug, info};
use ufh::{event::{Event, WipEvent, EventContent}, item::ItemRef, derived::Derived};
use crate::{state::{db::{sqlite::Sqlite, Database}, search::tantivy::Tantivy}, routes::things::{Error, thumbnail::ThumbnailSize}, blobs, Relations, items::events::update_search_index};
use events::DelayedAction;
use ufh::{
derived::Derived,
event::{Event, EventContent, WipEvent},
item::ItemRef,
};

pub mod events;

@ -52,7 +65,7 @@ impl Items {
// unsure whether to return rowid (and relations) here...
pub async fn begin_event_create(&self, wip: WipEvent) -> Result<WipCreate, Error> {
debug!("begin new create");

if !wip.has_valid_signature() {
return Err(Error::Validation("missing or invalid signature"));
}

@ -74,7 +87,7 @@ impl Items {
// get/validate relations
let relations = events::get_relations(&self.db, &event).await?;
debug!("collected {} relations", relations.len());

// handle special events
let action = events::prepare_special(self, &event, &relations).await?;

@ -102,15 +115,17 @@ impl Items {
let event = create.event;
let mutex = self.mutex_for(&wip.item_ref).await;
let lock = mutex.lock().await;

events::commit_special(self, &wip.action).await?;

let derived = if let EventContent::File(file) = &event.content {
debug!("begin derive");
let (derived, thumb) = events::derive(self, &event, file).await?;
self.db.derived_set(&event.id, &derived).await?;
if let Some(thumb) = thumb {
self.db.thumbnail_create(&event.id, &ThumbnailSize::Raw, &thumb).await?;
self.db
.thumbnail_create(&event.id, &ThumbnailSize::Raw, &thumb)
.await?;
}
debug!("derived file info");
derived

@ -124,7 +139,7 @@ impl Items {
let relations = events::get_relations(&self.db, &event).await?;

update_search_index(self, &event, &relations).await?;

drop(lock);

Ok(Create {

@ -136,7 +151,9 @@ impl Items {

async fn mutex_for(&self, item_ref: &ItemRef) -> Arc<Mutex<()>> {
let mut map = self.partials.lock().await;
map.entry(item_ref.clone()).or_insert_with(|| Arc::new(Mutex::new(()))).clone()
map.entry(item_ref.clone())
.or_insert_with(|| Arc::new(Mutex::new(())))
.clone()
}

#[async_recursion::async_recursion]

@ -156,17 +173,20 @@ impl Items {
pub async fn get(&self, item_ref: &ItemRef, via: Option<&str>) -> Result<Item, Error> {
static CACHE: OnceCell<Mutex<LruCache<ItemRef, Item>>> = OnceCell::new();
// let mut cache = CACHE.get_or_init(|| Mutex::new(LruCache::new(NonZeroUsize::new(100).unwrap()))).lock().await;
let mut cache = CACHE.get_or_init(|| Mutex::new(LruCache::new(NonZeroUsize::new(1).unwrap()))).lock().await;
let mut cache = CACHE
.get_or_init(|| Mutex::new(LruCache::new(NonZeroUsize::new(1).unwrap())))
.lock()
.await;
match cache.peek(item_ref).cloned() {
Some(item) => {
cache.promote(item_ref);
Ok(item)
},
}
None => {
let item = self.get_uncached(item_ref, via).await?;
cache.put(item_ref.clone(), item.clone());
Ok(item)
},
}
}
}

@ -180,7 +200,7 @@ impl Items {
debug!("event didn't exist in db, re-adding");
let event = self.create_event(wip).await?.event;
return Ok(Item::Event(event));
},
}
Ok(blobs::Item::Raw(bytes)) => return Ok(Item::Blob(bytes)),
Err(Error::NotFound) => (),
Err(err) => return Err(err),

@ -215,30 +235,30 @@ impl Items {
self.blobs.put(item).await?;
}
}

let event = self.create_event(wip).await?.event;

Ok(Item::Event(event))
}

async fn pull_blob(&self, item_ref: &ItemRef, via: &str) -> Result<(), Error> {
let blob = match self.blobs.get(item_ref).await{
let blob = match self.blobs.get(item_ref).await {
Ok(blobs::Item::WipEvent(_)) => panic!("expected blob, got event"),
Ok(blobs::Item::Raw(blob)) => blob,
Err(Error::NotFound) => {
match self.blobs.get_via(item_ref, via).await? {
blobs::Item::WipEvent(_) => return Err(Error::Validation("expected blob, got event")),
blobs::Item::Raw(blob) => {
self.blobs.put(blobs::Item::Raw(blob.clone())).await?;
blob
},
Err(Error::NotFound) => match self.blobs.get_via(item_ref, via).await? {
blobs::Item::WipEvent(_) => {
return Err(Error::Validation("expected blob, got event"))
}
blobs::Item::Raw(blob) => {
self.blobs.put(blobs::Item::Raw(blob.clone())).await?;
blob
}
},
Err(err) => return Err(err),
};

self.db.create_blob(item_ref, blob.len() as u32).await?;

Ok(())
}
}
}
@ -1,28 +1,28 @@
#![feature(async_fn_in_trait)] // ahh yes, experimental features

use axum::extract::Json;
use axum::{Router, Server, routing};
use tower_http::{cors::CorsLayer, trace::TraceLayer};
use serde::Serialize;
use ufh::item::ItemRef;
use ufh::query::Query;
use axum::{routing, Router, Server};
use clap::Parser;
use serde::Serialize;
use std::collections::HashMap;
use std::net::{SocketAddr, SocketAddrV4};
use std::sync::Arc;
use tokio::sync::{RwLock, Mutex};
use tokio::sync::broadcast;
use tokio::sync::{Mutex, RwLock};
use tower_http::{cors::CorsLayer, trace::TraceLayer};
use tracing::info;
use ufh::item::ItemRef;
use ufh::query::Query;

mod error;
mod blobs;
mod items;
mod derive;
mod state;
mod routes;
mod cli;
mod derive;
mod error;
mod items;
mod middleware;
mod peer;
mod routes;
mod state;

pub(crate) use error::Error;
use ufh::event::{Event, RelInfo};

@ -56,9 +56,8 @@ async fn main() -> Result<(), Error> {
let log_subscriber = tracing_subscriber::FmtSubscriber::builder()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.finish();
tracing::subscriber::set_global_default(log_subscriber)
.expect("failed to setup logger");

tracing::subscriber::set_global_default(log_subscriber).expect("failed to setup logger");

let command: cli::Command = cli::Command::parse();
match command.action {
cli::Action::Serve { port } => serve(port).await,

@ -67,7 +66,7 @@ async fn main() -> Result<(), Error> {

async fn serve(port: u16) -> Result<(), Error> {
info!("Hello, world!");

info!("load db");
let db = state::db::sqlite::Sqlite::open("data/data.db").await?;
let blob_client = blobs::Client::new("http://localhost:3219");

@ -82,7 +81,7 @@ async fn serve(port: u16) -> Result<(), Error> {
events: tokio::sync::broadcast::channel(1).0, // set low for now, to check for bad code
p2p: Mutex::new(p2p),
};

let router = Router::new()
.route("/", routing::get(instance_info))
.nest("/things", routes::things::routes())

@ -96,7 +95,7 @@ async fn serve(port: u16) -> Result<(), Error> {
.layer(axum::middleware::from_fn(middleware::csp))
.layer(CorsLayer::permissive())
.layer(TraceLayer::new_for_http());

info!("start server");

let addr = SocketAddr::V4(SocketAddrV4::new("0.0.0.0".parse().unwrap(), port));
@ -1,5 +1,5 @@
use axum::body::HttpBody;
use axum::http::{Request, HeaderValue, StatusCode};
use axum::http::{HeaderValue, Request, StatusCode};
use axum::middleware::Next;
use axum::response::IntoResponse;
use axum::TypedHeader;

@ -8,7 +8,9 @@ use std::ops::Bound;
pub async fn csp<B>(request: Request<B>, next: Next<B>) -> axum::response::Response {
let mut response = next.run(request).await;
let csp = HeaderValue::from_static("default-src 'unsafe-inline' 'self'; script-src 'none'");
response.headers_mut().insert("Content-Security-Policy", csp);
response
.headers_mut()
.insert("Content-Security-Policy", csp);
response
}

@ -27,7 +29,7 @@ pub async fn range<B>(
let accept_range = TypedHeader(axum::headers::AcceptRanges::bytes());
return (parts, accept_range, body).into_response();
};

let mut iter = range.iter();
let range = iter.next().expect("range request always has one range?");
if iter.next().is_some() {

@ -43,35 +45,43 @@ pub async fn range<B>(
chunks.concat()
};

use bytes::Bytes;
use axum::headers::ContentRange;
use bytes::Bytes;

let slice = match range {
(Bound::Unbounded, Bound::Unbounded) => Bytes::from(data),
(Bound::Included(start), Bound::Unbounded) => {
Bytes::copy_from_slice(&data[start as usize..])
},
}
(Bound::Included(start), Bound::Included(end)) => {
Bytes::copy_from_slice(&data[start as usize..=end as usize])
},
}
_ => return axum::http::StatusCode::RANGE_NOT_SATISFIABLE.into_response(),
};

let content_length = slice.len() as u64;

let server_range = match range {
(Bound::Unbounded, Bound::Unbounded) => ContentRange::bytes(.., Some(content_length)),
(Bound::Included(start), Bound::Unbounded) => ContentRange::bytes(start.., Some(content_length)),
(Bound::Included(start), Bound::Included(end)) => ContentRange::bytes(start..=end, Some(content_length)),
(Bound::Included(start), Bound::Unbounded) => {
ContentRange::bytes(start.., Some(content_length))
}
(Bound::Included(start), Bound::Included(end)) => {
ContentRange::bytes(start..=end, Some(content_length))
}
_ => unreachable!(),
}.expect("invalid range");
}
.expect("invalid range");

let content_range = TypedHeader(server_range);
let accept_range = TypedHeader(axum::headers::AcceptRanges::bytes());

parts.headers.insert("content-length", HeaderValue::from_str(&content_length.to_string()).unwrap());

parts.headers.insert(
"content-length",
HeaderValue::from_str(&content_length.to_string()).unwrap(),
);

parts.status = StatusCode::PARTIAL_CONTENT;

(parts, content_range, accept_range, slice).into_response()
}
@ -2,9 +2,9 @@

// #![allow(unused)] // TODO (commit): remove this before comitting

use std::collections::{HashMap, HashSet};
use serde::{Deserialize, Serialize};
use sha2::Digest;
use serde::{Serialize, Deserialize};
use std::collections::{HashMap, HashSet};

/// the length of each key
const KEY_LEN: usize = 20;

@ -98,10 +98,13 @@ impl Distance {
}
}

impl Contact {
// TODO (future): i really should split apart network logic
async fn send(&self, sender: &Self, message: RPCRequest) -> Result<RPCResponse, reqwest::Error> {
async fn send(
&self,
sender: &Self,
message: RPCRequest,
) -> Result<RPCResponse, reqwest::Error> {
#[derive(Debug, Serialize)]
struct Request<'a> {
info: RPCRequest,

@ -112,7 +115,7 @@ impl Contact {
info: message,
contact: sender,
};

reqwest::Client::new()
.post(format!("http://{}/p2p/recv", self.host))
.json(&request)

@ -192,7 +195,7 @@ impl Node {
if let Ok(res) = contact.send(&self.contact, message).await {
return Some(res);
}

// node is dead
self.router.remove(&contact.id);
None

@ -224,7 +227,8 @@ impl Node {
pub async fn set(&mut self, key: &NodeId, value: &str) {
let contacts = self.router.find_closest(key, 1);
for contact in contacts {
self.send(&contact, RPCRequest::Store(*key, value.to_owned())).await;
self.send(&contact, RPCRequest::Store(*key, value.to_owned()))
.await;
}
self.store.insert(*key, value.to_owned());
}
@ -1,6 +1,6 @@
use axum::{Router, routing};
use std::sync::Arc;
use crate::ServerState;
use axum::{routing, Router};
use std::sync::Arc;
// use super::things::Error;

// type Response<T> = Result<T, Error>;

@ -8,8 +8,7 @@ use crate::ServerState;
// TODO: list shares on an item

pub fn routes() -> Router<Arc<ServerState>> {
Router::new()
.route("/", routing::get(|| async { "todo!" }))
// .route("/", routing::post(manage::create))
// .route("/:share_id", routing::get(manage::get).delete(manage::delete))
Router::new().route("/", routing::get(|| async { "todo!" }))
// .route("/", routing::post(manage::create))
// .route("/:share_id", routing::get(manage::get).delete(manage::delete))
}

@ -1,6 +1,6 @@
pub mod things;
pub mod shares;
pub mod aliases;
pub mod search;
pub mod util;
pub mod p2p;
pub mod search;
pub mod shares;
pub mod things;
pub mod util;
@ -1,9 +1,15 @@
use axum::{Router, routing, Json, extract::{State, ConnectInfo, Path}};
use serde::{Serialize, Deserialize};
use crate::{
peer::{Contact, NodeId, RPCRequest, RPCResponse},
ServerState,
};
use axum::{
extract::{ConnectInfo, Path, State},
routing, Json, Router,
};
use serde::{Deserialize, Serialize};
use std::net::SocketAddr;
use tracing::debug;
use std::sync::Arc;
use crate::{ServerState, peer::{RPCRequest, RPCResponse, Contact, NodeId}};
use tracing::debug;

use super::things::Error;

@ -54,10 +60,7 @@ async fn set(
Ok(())
}

async fn get(
State(state): State<Arc<ServerState>>,
Path(key): Path<String>,
) -> Response<String> {
async fn get(State(state): State<Arc<ServerState>>, Path(key): Path<String>) -> Response<String> {
debug!("handle p2p get");
let key = NodeId::new_from_str(&key);
let mut p2p = state.p2p.lock().await;
@ -1,19 +1,27 @@
use axum::{Router, routing, extract::{State, Query}, Json};
use super::things::Error;
use super::util::{perms, Authenticate};
use crate::{
state::{
db::{Database, DbItem},
search::{Search, Snippet},
},
ServerState,
};
use axum::{
extract::{Query, State},
routing, Json, Router,
};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tracing::debug;
use ufh::{event::Event, item::ItemRef};
use std::sync::Arc;
use crate::{ServerState, state::{search::{Search, Snippet}, db::{Database, DbItem}}};
use serde::{Deserialize, Serialize};
use super::util::{Authenticate, perms};
use super::things::Error;

type Response<T> = Result<T, Error>;

// TODO: list shares on an item

pub fn routes() -> Router<Arc<ServerState>> {
Router::new()
.route("/", routing::get(search))
Router::new().route("/", routing::get(search))
}

#[derive(Debug, Deserialize)]

@ -45,17 +53,27 @@ async fn search(
let limit = params.limit.unwrap_or(50).min(100);
let offset = params.offset.unwrap_or(0);
let sniplen = params.sniplen.unwrap_or(256);
let results = state.search.search(&params.q, limit, offset, sniplen).await?;
let results = state
.search
.search(&params.q, limit, offset, sniplen)
.await?;
let item_refs: Vec<ItemRef> = results.iter().map(|i| i.item_ref.clone()).collect();
let mut items = state.db.bulk_fetch(&item_refs, true).await?;
let results: Vec<SearchDoc> = results
.into_iter()
.map(|doc| {
let item = items.remove(&doc.item_ref).expect("search results has nonexistent event");
let item = items
.remove(&doc.item_ref)
.expect("search results has nonexistent event");
match item {
DbItem::Blob => panic!("search result returned a blob"),
DbItem::Event(event) => SearchDoc { score: doc.score, snippet: doc.snippet, event },
DbItem::Event(event) => SearchDoc {
score: doc.score,
snippet: doc.snippet,
event,
},
}
}).collect();
})
.collect();
Ok(Json(SearchResults { results }))
}
@ -1,16 +1,16 @@
use axum::extract::{State, Path, Json};
use axum::extract::{Json, Path, State};
use nanoid::nanoid;
use reqwest::StatusCode;
use serde_json::{Value, json};
use ufh::item::ItemRef;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use sqlx::query as sql;
use std::str::FromStr;
use std::sync::Arc;
use serde::{Deserialize, Serialize};
use sqlx::query as sql;
use nanoid::nanoid;
use ufh::item::ItemRef;

use crate::ServerState;
use crate::routes::things::Error;
use crate::routes::util::{Authenticate, perms};
use crate::routes::util::{perms, Authenticate};
use crate::ServerState;

type Response<T> = Result<T, Error>;

@ -40,9 +40,14 @@ pub async fn create(
let share_id = create.share_id.unwrap_or_else(|| nanoid!());
let item_ref_str = create.item_ref.to_string();
let expires_at = create.expires_at.map(|i| i as i64); // very not good, sqlx (or sqlite?) doesn't support u64 though
sql!("INSERT INTO shares (id, ref, expires_at) VALUES (?, ?, ?)", share_id, item_ref_str, expires_at)
.execute(state.db.pool())
.await?;
sql!(
"INSERT INTO shares (id, ref, expires_at) VALUES (?, ?, ?)",
share_id,
item_ref_str,
expires_at
)
.execute(state.db.pool())
.await?;
// TODO: use StatusCode::CONFLICT
Ok((StatusCode::CREATED, Json(json!({ "share": share_id }))))
}

@ -52,9 +57,12 @@ pub async fn get(
_auth: Authenticate<perms::ReadOnly>,
Path(share_id): Path<String>,
) -> Response<Json<Share>> {
let record = sql!("SELECT *, ref as item_ref FROM shares WHERE id = ?", share_id)
.fetch_one(state.db.pool())
.await?;
let record = sql!(
"SELECT *, ref as item_ref FROM shares WHERE id = ?",
share_id
)
.fetch_one(state.db.pool())
.await?;
let share = Share {
id: share_id,
item_ref: ItemRef::from_str(&record.item_ref)?,

@ -73,4 +81,3 @@ pub async fn delete(
.await?;
Ok(StatusCode::NO_CONTENT)
}
@ -1,15 +1,15 @@
use axum::{Router, routing};
use axum::extract::{State, Path};
use super::things::Error;
use crate::ServerState;
use axum::extract::{Path, State};
use axum::{routing, Router};
use reqwest::StatusCode;
use ufh::item::ItemRef;
use sqlx::query as sql;
use std::str::FromStr;
use std::sync::Arc;
use sqlx::query as sql;
use crate::ServerState;
use super::things::Error;
use ufh::item::ItemRef;

pub mod view;
pub mod manage;
pub mod view;

type Response<T> = Result<T, Error>;

@ -18,5 +18,8 @@ type Response<T> = Result<T, Error>;
pub fn routes() -> Router<Arc<ServerState>> {
Router::new()
.route("/", routing::post(manage::create))
.route("/:share_id", routing::get(manage::get).delete(manage::delete))
.route(
"/:share_id",
routing::get(manage::get).delete(manage::delete),
)
}
@ -1,7 +1,7 @@
use super::*;
use axum::http::{HeaderValue, HeaderMap};
use ufh::event::EventContent;
use crate::{routes::util::get_blob, state::db::Database};
use axum::http::{HeaderMap, HeaderValue};
use ufh::event::EventContent;

pub fn routes() -> Router<Arc<ServerState>> {
Router::new()

@ -10,7 +10,9 @@ pub fn routes() -> Router<Arc<ServerState>> {
}

fn get_time() -> Result<u64, std::time::SystemTimeError> {
Ok(std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH)?.as_millis() as u64)
Ok(std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)?
.as_millis() as u64)
}

async fn share_notice() -> &'static str {

@ -22,18 +24,33 @@ async fn share_view(
State(state): State<Arc<ServerState>>,
Path(share_id): Path<String>,
) -> Response<(HeaderMap, Vec<u8>)> {
let record = sql!("SELECT *, ref as item_ref FROM shares WHERE id = ?", share_id)
.fetch_one(state.db.pool())
.await?;
let record = sql!(
"SELECT *, ref as item_ref FROM shares WHERE id = ?",
share_id
)
.fetch_one(state.db.pool())
.await?;
let time = get_time().map_err(|_| Error::Validation("system time error"))?;
if record.expires_at.is_some_and(|i| time > i as u64) {
return Err(Error::Http(StatusCode::GONE, "your link has expired".into()));
return Err(Error::Http(
StatusCode::GONE,
"your link has expired".into(),
));
}
let item_ref = ItemRef::from_str(&record.item_ref)?;
let event = state.db.get_event(&item_ref).await?.ok_or(Error::NotFound)?;
let event = state
.db
.get_event(&item_ref)
.await?
.ok_or(Error::NotFound)?;
let blob = get_blob(&state.items, &event, None).await?;
let EventContent::File(content) = event.content else { unreachable!("already validated by get_blob") };
let mime = event.derived.file.map_or_else(|| String::from("application/octet-stream"), |f| f.mime);
let EventContent::File(content) = event.content else {
unreachable!("already validated by get_blob")
};
let mime = event
.derived
.file
.map_or_else(|| String::from("application/octet-stream"), |f| f.mime);
// TODO: research and decide whether to use inline or attachment
let disposition = match content.name {
None => HeaderValue::from_static("inline"),

@ -41,10 +58,13 @@ async fn share_view(
// TODO: properly handle names (especially urlencoded names) (may be security vuln currently?)
HeaderValue::from_str(&format!("inline; filename*=UTF-8''{}", name))
.unwrap_or_else(|_| HeaderValue::from_static("inline"))
},
}
};
let mut headers = HeaderMap::new();
headers.insert("content-type", mime.try_into().expect("probably is a header value"));
headers.insert(
"content-type",
mime.try_into().expect("probably is a header value"),
);
headers.insert("content-disposition", disposition);
Ok((headers, blob.to_vec()))
}
@ -1,13 +1,13 @@
use axum::extract::{State, Path, Query};
use axum::http::{HeaderMap, HeaderValue, StatusCode};
use ufh::event::EventContent;
use crate::error::Error;
use crate::ServerState;
use crate::routes::util::get_blob;
use ufh::item::ItemRef;
use std::sync::Arc;
use crate::ServerState;
use axum::extract::{Path, Query, State};
use axum::http::{HeaderMap, HeaderValue, StatusCode};
use serde::Deserialize;
use std::sync::Arc;
use tracing::debug;
use ufh::event::EventContent;
use ufh::item::ItemRef;

use super::Response;
use crate::items::Item;

@ -33,10 +33,13 @@ pub async fn route(
},
Item::Blob(_) => return Err(Error::Validation("this is not an event at all")),
};

debug!("getting blob chunks");
let blob = get_blob(&state.items, &event, params.via.as_deref()).await?;
let mime = event.derived.file.map_or_else(|| String::from("application/octet-stream"), |f| f.mime);
let mime = event
.derived
.file
.map_or_else(|| String::from("application/octet-stream"), |f| f.mime);
// TODO: research and decide whether to use inline or attachment
let disposition = match &file.name {
None => HeaderValue::from_static("inline"),

@ -44,14 +47,17 @@ pub async fn route(
// TODO: properly handle names (especially urlencoded names) (may be security vuln currently?)
HeaderValue::from_str(&format!("inline; filename*=UTF-8''{}", name))
.unwrap_or_else(|_| HeaderValue::from_static("inline"))
},
}
};

debug!("content-disposition: {}", disposition.to_str().unwrap());

let mut headers = HeaderMap::new();
headers.insert("content-type", mime.try_into().expect("probably is a header value"));
headers.insert(
"content-type",
mime.try_into().expect("probably is a header value"),
);
headers.insert("content-disposition", disposition);

Ok((StatusCode::OK, headers, blob.to_vec()))
}
@ -1,10 +1,14 @@
use std::sync::Arc;
use axum::{extract::State, Json};
use ufh::item::ItemRef;
use serde::{Deserialize, Serialize};
use tracing::debug;
use crate::{ServerState, routes::util::{Authenticate, perms}, state::db::Database};
use super::Response;
use crate::{
routes::util::{perms, Authenticate},
state::db::Database,
ServerState,
};
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tracing::debug;
use ufh::item::ItemRef;

#[derive(Debug, Deserialize)]
pub struct BulkCheckOptions {

@ -23,6 +27,11 @@ pub async fn route(
Json(options): Json<BulkCheckOptions>,
) -> Response<Json<BulkCheckResponse>> {
debug!("bulk check");
let have = state.db.bulk_fetch(&options.refs, true).await?.into_keys().collect();
let have = state
.db
.bulk_fetch(&options.refs, true)
.await?
.into_keys()
.collect();
Ok(Json(BulkCheckResponse { have }))
}
@ -81,14 +81,18 @@ pub async fn route(
let Ok(create) = state.items.finish_event_create(wip).await else {
return error!("failed to finish creating event");
};
let _ = state.events.send((create.event, create.relations, create.rowid));
let _ = state
.events
.send((create.event, create.relations, create.rowid));
});
return Ok((StatusCode::ACCEPTED, Json(json!({ "ref": item_ref }))));
}

let create = state.items.finish_event_create(wip).await?;
let item_ref = create.event.id.clone();
let _ = state.events.send((create.event, create.relations, create.rowid));
let _ = state
.events
.send((create.event, create.relations, create.rowid));
debug!("notified pollers of event");

item_ref
@ -1,15 +1,15 @@
use axum::extract::{State, Query};
use reqwest::StatusCode;
use ufh::{event::Event, query::MatchType};
use crate::ServerState;
use crate::state::db::Database;
use serde::{Serialize, Deserialize};
use ufh::item::ItemRef;
use crate::ServerState;
use axum::extract::{Query, State};
use axum::response::IntoResponse;
use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use axum::response::IntoResponse;
use tracing::debug;
use ufh::item::ItemRef;
use ufh::{event::Event, query::MatchType};

use crate::routes::util::{perms, Authenticate};

@ -48,7 +48,7 @@ pub async fn route(
) -> Result<QueryResult, Error> {
tracing::Span::current().record("query", &params.query);
debug!("enumerate");

let queries = state.queries.read().await;
let query = queries
.get(&params.query)

@ -59,11 +59,14 @@ pub async fn route(
debug!("query with {:?}", query);

// a bit of a mess, should figure out how to do this better
let result = state.db.query_events(&query, params.limit, params.after.map(|p| p.to_string())).await?;
let result = state
.db
.query_events(&query, params.limit, params.after.map(|p| p.to_string()))
.await?;
let result = match (params.timeout, result.events.is_empty()) {
(Some(timeout), true) => {
debug!("no events, waiting for new ones...");

let next_event = async {
let mut recv = state.events.subscribe();
loop {

@ -74,7 +77,7 @@ pub async fn route(
mt @ MatchType::Relation => Ok((mt, event, rowid)),
MatchType::None => continue,
},
}
};
}
};

@ -88,26 +91,24 @@ pub async fn route(
relations: None,
next: Some(rowid.to_string()),
});
},
}
(MatchType::Relation, event, rowid) => {
debug!("poller received relation {:?}", event);
return Ok(QueryResult {
events: Vec::new(),
relations: Some(HashMap::from([
(event.id.clone(), event),
])),
relations: Some(HashMap::from([(event.id.clone(), event)])),
next: Some(rowid.to_string()),
});
},
}
(MatchType::None, _, _) => unreachable!("handled by next_event(...)"),
},
Ok(Err(err)) => return Err(err),
Err(_) => {
debug!("long poll didn't receive any events");
result
},
}
}
},
}
_ => result,
};

@ -120,7 +121,7 @@ pub async fn route(
} else {
None
};

Ok(QueryResult {
events: result.events,
relations,
@ -1,13 +1,21 @@
use std::sync::Arc;
use axum::{headers::ContentType, TypedHeader, extract::{Path, State, Query}};
use axum::{
extract::{Path, Query, State},
headers::ContentType,
TypedHeader,
};
use bytes::Bytes;
use reqwest::StatusCode;
use ufh::item::ItemRef;
use serde::Deserialize;
use std::sync::Arc;
use tracing::debug;
use ufh::item::ItemRef;

use crate::{ServerState, routes::util::{Authenticate, perms}, items::Item};
use super::{Response, Error};
use super::{Error, Response};
use crate::{
items::Item,
routes::util::{perms, Authenticate},
ServerState,
};

#[derive(Debug, Deserialize)]
pub struct FetchParams {

@ -23,21 +31,36 @@ pub async fn route(
) -> Response<(StatusCode, TypedHeader<ContentType>, Bytes)> {
tracing::Span::current().record("item_ref", &item_ref.to_string());
debug!("fetch");

let item = state.items.get(&item_ref, params.via.as_deref()).await?;

match item {
Item::Blob(blob) => {
debug!("got blob");
Ok((StatusCode::OK, TypedHeader(ContentType::octet_stream()), blob))
},
Ok((
StatusCode::OK,
TypedHeader(ContentType::octet_stream()),
blob,
))
}
Item::Event(event) if auth.level > 0 || event.content.get_type() == "x.file" => {
debug!("got event");
let event_str = serde_json::to_string(&event)?;
let status = if event.is_redacted() { StatusCode::GONE } else { StatusCode::OK };
Ok((status, TypedHeader(ContentType::json()), Bytes::from(event_str.into_bytes())))
},
let status = if event.is_redacted() {
StatusCode::GONE
} else {
StatusCode::OK
};
Ok((
status,
TypedHeader(ContentType::json()),
Bytes::from(event_str.into_bytes()),
))
}
// TODO (security): should this be "not found" instead of forbidden?
_ => Err(Error::Http(StatusCode::FORBIDDEN, "you can't view this event".into()))
_ => Err(Error::Http(
StatusCode::FORBIDDEN,
"you can't view this event".into(),
)),
}
}
@ -1,24 +1,29 @@
use axum::{Router, routing};
use axum::{routing, Router};
use std::sync::Arc;

pub(crate) use crate::error::Error;

use super::util::{perms, Authenticate};
use crate::ServerState;
use super::util::{Authenticate, perms};

mod blob;
mod check;
pub mod create;
mod enumerate;
pub mod fetch;
mod query;
mod blob;
mod enumerate;
mod check;
pub mod thumbnail;

type Response<T> = Result<T, Error>;

pub fn routes() -> Router<Arc<ServerState>> {
Router::new()
.route("/", routing::post(create::route).get(enumerate::route).head(enumerate::head))
.route(
"/",
routing::post(create::route)
.get(enumerate::route)
.head(enumerate::head),
)
.route("/check", routing::post(check::route))
.route("/query", routing::post(query::route))
.route("/:item_ref", routing::get(fetch::route))
@ -1,10 +1,10 @@
use axum::extract::{State, Json};
use reqwest::StatusCode;
use crate::ServerState;
use ufh::query;
use std::sync::Arc;
use serde_json::{Value, json};
use axum::extract::{Json, State};
use nanoid::nanoid;
use reqwest::StatusCode;
use serde_json::{json, Value};
use std::sync::Arc;
use ufh::query;

use crate::routes::util::{perms, Authenticate};
@ -1,18 +1,18 @@
use std::fmt::Display;
use axum::extract::{State, Path, Query};
use axum::headers::{ContentType, CacheControl};
use crate::routes::util::get_blob;
use crate::state::db::{Database, Thumbnail};
use crate::ServerState;
use axum::extract::{Path, Query, State};
use axum::headers::{CacheControl, ContentType};
use axum::TypedHeader;
use bytes::Bytes;
use crate::ServerState;
use crate::state::db::{Database, Thumbnail};
use crate::routes::util::get_blob;
use serde::{Serialize, Deserialize};
use ufh::item::ItemRef;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::io::Cursor;
use std::sync::Arc;
use std::time::Duration;
use ufh::item::ItemRef;

use super::{Response, Error};
use super::{Error, Response};

pub async fn route(
State(state): State<Arc<ServerState>>,

@ -21,21 +21,25 @@ pub async fn route(
) -> Response<(TypedHeader<ContentType>, TypedHeader<CacheControl>, Bytes)> {
let size = ThumbnailSize::closest_to(params.width, params.height);
let thumb = state.db.thumbnail_get(&item_ref, &size).await?;
let cache_control = TypedHeader(CacheControl::new()
.with_public()
.with_max_age(Duration::from_secs(60 * 60 * 24)));
let cache_control = TypedHeader(
CacheControl::new()
.with_public()
.with_max_age(Duration::from_secs(60 * 60 * 24)),
);
let content_type = TypedHeader(ContentType::png());
match thumb {
Thumbnail::Raw(bytes) => {
let thumb = generate_thumb(&bytes, &size)?;
state.db.thumbnail_create(&item_ref, &size, &thumb).await?;
Ok((content_type, cache_control, thumb))
},
Thumbnail::Some(thumb) => {
Ok((content_type, cache_control, thumb))
}
Thumbnail::Some(thumb) => Ok((content_type, cache_control, thumb)),
Thumbnail::None => {
let event = state.db.get_event(&item_ref).await?.ok_or(Error::NotFound)?;
let event = state
.db
.get_event(&item_ref)
.await?
.ok_or(Error::NotFound)?;
let blob = get_blob(&state.items, &event, params.via.as_deref()).await?;
let thumb = generate_thumb(&blob, &size)?;
state.db.thumbnail_create(&item_ref, &size, &thumb).await?;
@ -1,10 +1,13 @@
use axum::{headers::{Authorization, authorization::Bearer}, TypedHeader};
use axum::{
headers::{authorization::Bearer, Authorization},
TypedHeader,
};
use bytes::Bytes;
use reqwest::StatusCode;
use ufh::event::{Event, EventContent};

use crate::{ServerState, items::Items};
use std::{sync::Arc, marker::PhantomData};
use crate::{items::Items, ServerState};
use std::{marker::PhantomData, sync::Arc};

use self::perms::AuthLevel;

@ -25,21 +28,58 @@ bitflags::bitflags! {
pub mod perms {
// surely theres a better method
pub trait AuthLevel {
fn new() -> Self where Self: Sized;
fn new() -> Self
where
Self: Sized;
fn to_num(&self) -> u8;
}

#[derive(Debug)]
pub struct None; // session exists but can't do anything
pub struct ReadOnly; // can only read/enumerate things
pub struct ReadWrite; // can write things (default level)

#[derive(Debug)]
pub struct None; // session exists but can't do anything
pub struct ReadOnly; // can only read/enumerate things
pub struct ReadWrite; // can write things (default level)
pub struct Everything; // can manage account (need better name, since admin exists)
pub struct Admin; // can do server administrative stuff
impl AuthLevel for None { fn new() -> Self { Self } fn to_num(&self) -> u8 { 0 } }
impl AuthLevel for ReadOnly { fn new() -> Self { Self } fn to_num(&self) -> u8 { 1 } }
impl AuthLevel for ReadWrite { fn new() -> Self { Self } fn to_num(&self) -> u8 { 2 } }
impl AuthLevel for Everything { fn new() -> Self { Self } fn to_num(&self) -> u8 { 3 } }
impl AuthLevel for Admin { fn new() -> Self { Self } fn to_num(&self) -> u8 { 4 } }
pub struct Admin; // can do server administrative stuff
impl AuthLevel for None {
fn new() -> Self {
Self
}
fn to_num(&self) -> u8 {
0
}
}
impl AuthLevel for ReadOnly {
fn new() -> Self {
Self
}
fn to_num(&self) -> u8 {
1
}
}
impl AuthLevel for ReadWrite {
fn new() -> Self {
Self
}
fn to_num(&self) -> u8 {
2
}
}
impl AuthLevel for Everything {
fn new() -> Self {
Self
}
fn to_num(&self) -> u8 {
3
}
}
impl AuthLevel for Admin {
fn new() -> Self {
Self
}
fn to_num(&self) -> u8 {
4
}
}
}

#[derive(Debug)]
@ -53,8 +93,11 @@ pub struct Authenticate<T: perms::AuthLevel> {
impl<T: AuthLevel> axum::extract::FromRequestParts<Arc<ServerState>> for Authenticate<T> {
type Rejection = (StatusCode, String);

async fn from_request_parts(parts: &mut axum::http::request::Parts, state: &Arc<ServerState>) -> Result<Self, Self::Rejection> {
let header = <TypedHeader<Authorization<Bearer>>>::from_request_parts(parts, state).await ;
async fn from_request_parts(
parts: &mut axum::http::request::Parts,
state: &Arc<ServerState>,
) -> Result<Self, Self::Rejection> {
let header = <TypedHeader<Authorization<Bearer>>>::from_request_parts(parts, state).await;
let token = match &header {
Ok(header) => header.token(),
Err(_) if T::new().to_num() == 0 => "",

@ -65,14 +108,17 @@ impl<T: AuthLevel> axum::extract::FromRequestParts<Arc<ServerState>> for Authent
}

impl<T: AuthLevel> Authenticate<T> {
pub async fn from_token(db: &sqlx::SqlitePool, token: &str) -> Result<Authenticate<T>, (StatusCode, String)> {
pub async fn from_token(
db: &sqlx::SqlitePool,
token: &str,
) -> Result<Authenticate<T>, (StatusCode, String)> {
// TODO: cache, probably in state.auth: HashMap<String, AuthLevel>
// (almost) every request has to go through this so it's a *very*
// hot path. sqlite is good for "lots of small requests", but even
// this is pushing it.

// TODO (future): use bitflags for more fine grained permissions

let query = sqlx::query!("SELECT * FROM sessions WHERE id = ?", token)
.fetch_one(db)
.await;

@ -84,16 +130,22 @@ impl<T: AuthLevel> Authenticate<T> {
level: 0,
_lvl: PhantomData,
});
},
_ => {},
}
_ => {}
}

let Ok(query) = query else {
return Err((StatusCode::UNAUTHORIZED, "Invalid, expired, or removed authorization token".into()));
return Err((
StatusCode::UNAUTHORIZED,
"Invalid, expired, or removed authorization token".into(),
));
};

if (query.level as u8) < T::new().to_num() {
return Err((StatusCode::UNAUTHORIZED, "Authorization token not a high enough level".into()));
return Err((
StatusCode::UNAUTHORIZED,
"Authorization token not a high enough level".into(),
));
}

Ok(Authenticate {

@ -105,7 +157,11 @@ impl<T: AuthLevel> Authenticate<T> {
}

// TODO: allow fetching ranges of files more easily
pub async fn get_blob(items: &Items, file_event: &Event, via: Option<&str>) -> Result<Bytes, Error> {
pub async fn get_blob(
items: &Items,
file_event: &Event,
via: Option<&str>,
) -> Result<Bytes, Error> {
let EventContent::File(file) = &file_event.content else {
return Err(Error::Validation("not a file event"));
};
@ -2,9 +2,9 @@ use std::collections::HashMap;

use bytes::Bytes;
use ufh::derived::Derived;
use ufh::event::Event;
use ufh::item::ItemRef;
use ufh::query::Query;
use ufh::event::Event;

use crate::routes::things::thumbnail::ThumbnailSize;

@ -39,7 +39,7 @@ pub enum DbItem {
pub enum Thumbnail {
Raw(Bytes),
Some(Bytes),
None
None,
}

pub trait Database {

@ -51,21 +51,47 @@ pub trait Database {
async fn create_blob(&self, item_ref: &ItemRef, size: u32) -> Result<(), Self::Error>;
async fn redact_event(&self, item_ref: &ItemRef) -> Result<(), Self::Error>;
async fn get_event(&self, item_ref: &ItemRef) -> Result<Option<Event>, Self::Error>;
async fn query_events(&self, query: &Query, limit: Option<u32>, after: Option<String>) -> Result<QueryResult, Self::Error>;
async fn query_relations(&self, relations: &[(String, String)], for_events: &[Event]) -> Result<HashMap<ItemRef, Event>, Self::Error>;
async fn query_events(
&self,
query: &Query,
limit: Option<u32>,
after: Option<String>,
) -> Result<QueryResult, Self::Error>;
async fn query_relations(
&self,
relations: &[(String, String)],
for_events: &[Event],
) -> Result<HashMap<ItemRef, Event>, Self::Error>;

// routes::things::create has a lot of file-specific things
async fn bulk_fetch(&self, item_refs: &[ItemRef], partial: bool) -> Result<HashMap<ItemRef, DbItem>, Self::Error>;
async fn bulk_fetch(
&self,
item_refs: &[ItemRef],
partial: bool,
) -> Result<HashMap<ItemRef, DbItem>, Self::Error>;
async fn tags_set(&self, item_refs: &[ItemRef], tags: &[String]) -> Result<(), Self::Error>;
async fn derived_set(&self, item_ref: &ItemRef, derived: &Derived) -> Result<(), Self::Error>;

// thumbnails
async fn thumbnail_create(&self, item_ref: &ItemRef, size: &ThumbnailSize, bytes: &[u8]) -> Result<(), Self::Error>;
async fn thumbnail_get(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<Thumbnail, Self::Error>;
async fn thumbnail_delete(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<(), Self::Error>;
async fn thumbnail_create(
&self,
item_ref: &ItemRef,
size: &ThumbnailSize,
bytes: &[u8],
) -> Result<(), Self::Error>;
async fn thumbnail_get(
&self,
item_ref: &ItemRef,
size: &ThumbnailSize,
) -> Result<Thumbnail, Self::Error>;
async fn thumbnail_delete(
&self,
item_ref: &ItemRef,
size: &ThumbnailSize,
) -> Result<(), Self::Error>;

// shares
// async fn share_create(&self, item_ref: &ItemRef, share_id: Option<&str>, expires_at: Option<u64>) -> Result<(), Self::Error>;
// async fn share_get(&self, share_id: Option<&str>) -> Result<(), Self::Error>;
// async fn share_delete(&self, share_id: Option<&str>) -> Result<(), Self::Error>;
// async fn share_delete(&self, share_id: Option<&str>) -> Result<(), Self::Error>;
}
@ -5,17 +5,17 @@ use std::collections::{HashMap, HashSet};
use std::str::FromStr;

use bytes::Bytes;
use sqlx::{SqlitePool, sqlite::SqliteConnectOptions, QueryBuilder, Row};
use sqlx::query as sql;
use futures_util::TryStreamExt;
use sqlx::query as sql;
use sqlx::{sqlite::SqliteConnectOptions, QueryBuilder, Row, SqlitePool};
use tracing::debug;
use ufh::derived::Derived;
use ufh::event::EventContent;
use ufh::{query, item::ItemRef};
use tracing::debug;
use ufh::{item::ItemRef, query};

use super::{Database, DbItem, Thumbnail};
use crate::routes::things::thumbnail::ThumbnailSize;
use crate::{Event, Error};
use crate::{Error, Event};

#[derive(Debug, Clone)]
pub struct Sqlite {

@ -82,7 +82,7 @@ impl Database for Sqlite {
let event_str = serde_json::to_string(&event)?;

let mut tx = self.pool.begin().await?;

debug!("insert into refs");
sql!("INSERT OR IGNORE INTO refs (ref) VALUES (?)", item_ref_str)
.execute(&mut tx)

@ -91,10 +91,11 @@ impl Database for Sqlite {
let insert = sql!("INSERT OR IGNORE INTO events (ref, sender, type, json, flags) VALUES (?, ?, ?, ?, ?) RETURNING rowid", item_ref_str, item_sender_str, item_type_str, event_str, 0)
.fetch_one(&mut tx)
.await?;

if !event.relations.is_empty() && event.content.get_type() != "x.redact" {
debug!("insert into relations");
let mut sql_relations = QueryBuilder::new("INSERT OR IGNORE INTO relations (ref_from, ref_to, rel_type) ");
let mut sql_relations =
QueryBuilder::new("INSERT OR IGNORE INTO relations (ref_from, ref_to, rel_type) ");
sql_relations.push_values(&event.relations, |mut item, (target, info)| {
let target_str = target.to_string();
item.push_bind(item_ref_str.clone())

@ -105,16 +106,16 @@ impl Database for Sqlite {
}

tx.commit().await?;

debug!("commit event {}", item_ref_str);

Ok(insert.rowid.unwrap() as u32)
}

#[tracing::instrument(skip_all)]
async fn create_blob(&self, item_ref: &ItemRef, size: u32) -> Result<(), Self::Error> {
let item_ref_str = item_ref.to_string();

// TODO: garabge collect blobs with no references
// should probably clean this up
let gc_at = std::time::SystemTime::now()

@ -127,12 +128,17 @@ impl Database for Sqlite {
sqlx::query!("INSERT OR IGNORE INTO refs (ref) VALUES (?)", item_ref_str)
.execute(&mut tx)
.await?;
sqlx::query!("INSERT OR IGNORE INTO blobs (ref, size, gc_at) VALUES (?, ?, ?)", item_ref_str, size, gc_at)
.execute(&mut tx)
.await?;
sqlx::query!(
"INSERT OR IGNORE INTO blobs (ref, size, gc_at) VALUES (?, ?, ?)",
item_ref_str,
size,
gc_at
)
.execute(&mut tx)
.await?;
tx.commit().await?;
debug!("created blob");

Ok(())
}

@ -147,9 +153,13 @@ impl Database for Sqlite {
let mut event: Event = serde_json::from_str(&row.json)?;
event.content = EventContent::Redacted(event.content.get_type().to_string());
let event_str = json_canon::to_string(&event)?;
sql!("UPDATE events SET json = ?, flags = flags | 1 WHERE ref = ?", event_str, item_ref_str)
.execute(&mut tx)
.await?;
sql!(
"UPDATE events SET json = ?, flags = flags | 1 WHERE ref = ?",
event_str,
item_ref_str
)
.execute(&mut tx)
.await?;
sql!("DELETE FROM derived WHERE ref = ?", item_ref_str)
.execute(&mut tx)
.await?;

@ -161,13 +171,16 @@ impl Database for Sqlite {
#[tracing::instrument(skip_all)]
async fn get_event(&self, item_ref: &ItemRef) -> Result<Option<Event>, Error> {
let item_ref_str = item_ref.to_string();
let result = sql!("
let result = sql!(
"
SELECT events.json as json, derived.json as derived FROM events
LEFT JOIN derived ON derived.ref = events.ref
WHERE events.ref = ?
", item_ref_str)
.fetch_one(&self.pool)
.await;
",
item_ref_str
)
.fetch_one(&self.pool)
.await;
debug!("get event {} = {:?}", item_ref, result);
let row = match result {
Ok(row) => row,

@ -176,15 +189,21 @@ impl Database for Sqlite {
};
debug!("got event from db");
let event: Event = serde_json::from_str(&row.json)?;
let derived = row.derived
let derived = row
.derived
.map(|json| serde_json::from_str(&json))
.transpose()?
.unwrap_or_default();
Ok(Some(Event { derived, ..event }))
}

#[tracing::instrument(skip_all)]
async fn query_events(&self, query: &query::Query, limit: Option<u32>, after: Option<String>) -> Result<super::QueryResult, Error> {
async fn query_events(
&self,
query: &query::Query,
limit: Option<u32>,
after: Option<String>,
) -> Result<super::QueryResult, Error> {
// this code is getting worse as time goes on
let mut builder = QueryBuilder::new("
SELECT DISTINCT events.rowid, events.ref AS item_ref, events.json, sender, derived.json as derived FROM events
@ -226,10 +245,12 @@ impl Database for Sqlite {
|
|||
builder.push(")");
|
||||
}
|
||||
let limit = limit.unwrap_or(50).clamp(0, 100);
|
||||
builder.push(" ORDER BY events.rowid LIMIT ").push_bind(limit);
|
||||
|
||||
builder
|
||||
.push(" ORDER BY events.rowid LIMIT ")
|
||||
.push_bind(limit);
|
||||
|
||||
debug!("generated sql: {}", builder.sql());
|
||||
|
||||
|
||||
let mut rows = builder.build().fetch(&self.pool);
|
||||
let mut events = Vec::with_capacity(limit as usize);
|
||||
let mut last_after = None;
|
||||
|
@ -245,9 +266,9 @@ impl Database for Sqlite {
|
|||
events.push(Event { derived, ..event });
|
||||
last_after = Some(rowid);
|
||||
}
|
||||
|
||||
|
||||
debug!("fetched {} events", events.len());
|
||||
|
||||
|
||||
Ok(super::QueryResult {
|
||||
events,
|
||||
next: last_after.map(|p| p.to_string()),
|
||||
|
@@ -255,15 +276,21 @@ impl Database for Sqlite {
     }
 
     #[tracing::instrument(skip_all)]
-    async fn query_relations(&self, relations: &[(String, String)], for_events: &[Event]) -> Result<HashMap<ItemRef, Event>, Self::Error> {
-        let mut builder = sqlx::QueryBuilder::new("
+    async fn query_relations(
+        &self,
+        relations: &[(String, String)],
+        for_events: &[Event],
+    ) -> Result<HashMap<ItemRef, Event>, Self::Error> {
+        let mut builder = sqlx::QueryBuilder::new(
+            "
             SELECT relations.ref_from AS item_ref, events_from.json, derived.json AS derived
             FROM relations
             JOIN events AS events_from ON events_from.ref = relations.ref_from
             JOIN events AS events_to ON events_to.ref = relations.ref_to
             LEFT JOIN derived ON derived.ref = events_from.ref
             WHERE (relations.rel_type, events_from.type) IN (
-        ");
+        ",
+        );
         builder.push_tuples(relations, |mut q, tup| {
             q.push_bind(tup.0.to_owned()).push_bind(tup.1.to_owned());
         });
@@ -291,14 +318,20 @@ impl Database for Sqlite {
         Ok(map)
     }
 
-    async fn bulk_fetch(&self, item_refs: &[ItemRef], partial: bool) -> Result<HashMap<ItemRef, DbItem>, Self::Error> {
-        let mut builder = sqlx::QueryBuilder::new("
+    async fn bulk_fetch(
+        &self,
+        item_refs: &[ItemRef],
+        partial: bool,
+    ) -> Result<HashMap<ItemRef, DbItem>, Self::Error> {
+        let mut builder = sqlx::QueryBuilder::new(
+            "
             SELECT refs.ref AS item_ref, events.json, blobs.size, derived.json AS derived FROM refs
             LEFT JOIN blobs ON blobs.ref = refs.ref
             LEFT JOIN events ON events.ref = refs.ref
             LEFT JOIN derived ON derived.ref = events.ref
             WHERE (refs.ref) IN (
-        ");
+        ",
+        );
         let mut sep = builder.separated(",");
         for item_ref in item_refs {
             sep.push_bind(item_ref.to_string());
@@ -346,13 +379,21 @@ impl Database for Sqlite {
         // TODO (performance, future): batch sql queries
         for item_ref in item_refs {
             let item_ref_str = item_ref.to_string();
-            let row = sql!("SELECT * FROM derived WHERE ref = ?", item_ref_str).fetch_one(&mut tx).await?;
+            let row = sql!("SELECT * FROM derived WHERE ref = ?", item_ref_str)
+                .fetch_one(&mut tx)
+                .await?;
             let derived = Derived {
                 tags: HashSet::from_iter(tags.iter().map(|i| i.to_string())),
                 ..serde_json::from_str(&row.json)?
             };
             let event_json = serde_json::to_string(&derived)?;
-            sql!("UPDATE derived SET json = ? WHERE ref = ?", event_json, item_ref_str).execute(&mut tx).await?;
+            sql!(
+                "UPDATE derived SET json = ? WHERE ref = ?",
+                event_json,
+                item_ref_str
+            )
+            .execute(&mut tx)
+            .await?;
         }
 
         let mut delete_query = QueryBuilder::new("DELETE FROM tags WHERE (ref) IN (");
@@ -361,17 +402,16 @@ impl Database for Sqlite {
             sep.push_bind(item_ref.to_string());
         }
         delete_query.push(")").build().execute(&mut tx).await?;
 
 
         let mut insert_query = QueryBuilder::new("INSERT INTO tags (ref, tag) ");
         insert_query.push_values(product, |mut item, (item_ref, tag)| {
-            item.push_bind(item_ref.to_string())
-                .push_bind(tag);
+            item.push_bind(item_ref.to_string()).push_bind(tag);
         });
         insert_query.build().execute(&mut tx).await?;
 
         tx.commit().await?;
         debug!("tagged {} events with {} tags", item_refs.len(), tags.len());
 
         Ok(())
     }
@@ -380,32 +420,54 @@ impl Database for Sqlite {
         let derived_str = serde_json::to_string(&derived)?;
 
         let mut tx = self.pool.begin().await?;
 
         debug!("insert into derived");
-        sql!("INSERT OR IGNORE INTO derived (ref, json) VALUES (?, ?)", item_ref_str, derived_str)
-            .execute(&mut tx)
-            .await?;
+        sql!(
+            "INSERT OR IGNORE INTO derived (ref, json) VALUES (?, ?)",
+            item_ref_str,
+            derived_str
+        )
+        .execute(&mut tx)
+        .await?;
 
         tx.commit().await?;
 
         Ok(())
     }
 
-    async fn thumbnail_create(&self, item_ref: &ItemRef, size: &ThumbnailSize, bytes: &[u8]) -> Result<(), Self::Error> {
+    async fn thumbnail_create(
+        &self,
+        item_ref: &ItemRef,
+        size: &ThumbnailSize,
+        bytes: &[u8],
+    ) -> Result<(), Self::Error> {
         let item_ref_str = item_ref.to_string();
         let size_str = size.to_string();
-        sql!("INSERT INTO thumbnails (ref, size, blob) VALUES (?, ?, ?)", item_ref_str, size_str, bytes)
-            .execute(&self.pool)
-            .await?;
+        sql!(
+            "INSERT INTO thumbnails (ref, size, blob) VALUES (?, ?, ?)",
+            item_ref_str,
+            size_str,
+            bytes
+        )
+        .execute(&self.pool)
+        .await?;
         Ok(())
     }
 
-    async fn thumbnail_get(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<Thumbnail, Self::Error> {
+    async fn thumbnail_get(
+        &self,
+        item_ref: &ItemRef,
+        size: &ThumbnailSize,
+    ) -> Result<Thumbnail, Self::Error> {
         let item_ref_str = item_ref.to_string();
         let size_str = size.to_string();
-        let result = sql!("SELECT * FROM thumbnails WHERE ref = ? AND size IN (?, 'raw')", item_ref_str, size_str)
-            .fetch_one(&self.pool)
-            .await;
+        let result = sql!(
+            "SELECT * FROM thumbnails WHERE ref = ? AND size IN (?, 'raw')",
+            item_ref_str,
+            size_str
+        )
+        .fetch_one(&self.pool)
+        .await;
         match result {
             Ok(row) if row.size == "raw" => Ok(Thumbnail::Raw(Bytes::from(row.blob))),
             Ok(row) => Ok(Thumbnail::Some(Bytes::from(row.blob))),
@@ -414,12 +476,20 @@ impl Database for Sqlite {
         }
     }
 
-    async fn thumbnail_delete(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<(), Self::Error> {
+    async fn thumbnail_delete(
+        &self,
+        item_ref: &ItemRef,
+        size: &ThumbnailSize,
+    ) -> Result<(), Self::Error> {
         let item_ref_str = item_ref.to_string();
         let size_str = size.to_string();
-        sql!("DELETE FROM thumbnails WHERE ref = ? AND size = ?", item_ref_str, size_str)
-            .execute(&self.pool)
-            .await?;
+        sql!(
+            "DELETE FROM thumbnails WHERE ref = ? AND size = ?",
+            item_ref_str,
+            size_str
+        )
+        .execute(&self.pool)
+        .await?;
         Ok(())
     }
 }
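
Every hunk in this file is the same mechanical rewrite: rustfmt, at its default max_width of 100 columns, re-wraps any call, signature, or chain that overflows the limit. A dependency-free sketch of the call-site rule (illustration only; the names and query here are made up, not code from this commit):

// When a call no longer fits on one line, each argument moves to its own
// line, and any method chain after the closing parenthesis drops back to
// the indent of the statement, mirroring the sql!(...) rewrites above.
fn main() {
    let item_ref_str = String::from("sha224-example");
    let query = format!(
        "SELECT events.json AS json, derived.json AS derived FROM events WHERE events.ref = '{}'",
        item_ref_str
    );
    println!("{query}");
}
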
@@ -1,6 +1,6 @@
+use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
-use serde::{Serialize, Deserialize};
-use ufh::{item::ItemRef, event::Event};
+use ufh::{event::Event, item::ItemRef};
 
 pub mod tantivy;
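
This hunk is pure import reordering: with rustfmt's default reorder_imports behavior, use declarations are sorted alphabetically within a group, and the names inside braces are reordered as well. A small sketch under that assumption (not from the commit):

// rustfmt keeps these sorted: BTreeMap before HashSet inside the braces,
// and std::collections before std::fmt across declarations.
use std::collections::{BTreeMap, HashSet};
use std::fmt::Write as _;

fn main() {
    let set = HashSet::from(["a", "b"]);
    let map = BTreeMap::from([(1, "one")]);
    let mut out = String::new();
    write!(out, "{} {}", set.len(), map.len()).unwrap();
    println!("{out}");
}
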
@@ -32,8 +32,14 @@ pub struct Document<'a> {
 
 pub trait Search {
     type Error;
 
     async fn upsert(&self, item_ref: &ItemRef, document: Document) -> Result<(), Self::Error>;
     async fn delete(&self, item_ref: &ItemRef) -> Result<(), Self::Error>;
-    async fn search(&self, query: &str, limit: usize, offset: usize, sniplen: usize) -> Result<Vec<SearchResult>, Self::Error>;
+    async fn search(
+        &self,
+        query: &str,
+        limit: usize,
+        offset: usize,
+        sniplen: usize,
+    ) -> Result<Vec<SearchResult>, Self::Error>;
 }
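
The Search::search change shows rustfmt's signature rule: once a header passes max_width, every parameter gets its own line with a trailing comma, and the return type follows the closing parenthesis. A hypothetical trait written to that shape (plain fn rather than async fn so the sketch compiles on stable; none of these names come from the commit):

#[allow(dead_code)]
trait Paginate {
    // Formatted the way rustfmt would emit an over-long signature.
    fn page(
        &self,
        query: &str,
        limit: usize,
        offset: usize,
        snippet_len: usize,
    ) -> Result<Vec<String>, String>;
}

fn main() {} // the trait above is the whole illustration
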
@@ -2,15 +2,18 @@ use std::sync::Arc;
 
 // use tracing::debug;
 // use crate::{Event, Error};
-use crate::{Error, state::search::Snippet};
-use tantivy::{Index, schema::Schema, Document as TantivyDoc, query::QueryParser, collector::TopDocs, directory::MmapDirectory, IndexReader, IndexWriter, DateTime, time::OffsetDateTime};
+use super::{Document, Search, SearchResult};
+use crate::{state::search::Snippet, Error};
+use tantivy::{
+    collector::TopDocs, directory::MmapDirectory, query::QueryParser, schema::Schema,
+    time::OffsetDateTime, DateTime, Document as TantivyDoc, Index, IndexReader, IndexWriter,
+};
+use tokio::sync::Mutex;
 use tracing::debug;
 use ufh::item::ItemRef;
-use tokio::sync::Mutex;
-use super::{Search, SearchResult, Document};
 
 #[derive(Clone)]
-pub struct Tantivy {
+pub struct Tantivy {
     schema: Schema,
     reader: IndexReader,
     writer: Arc<Mutex<IndexWriter>>,
@@ -57,7 +60,11 @@ impl Search for Tantivy {
     type Error = Error;
 
     #[tracing::instrument(skip_all)]
-    async fn upsert(&self, item_ref: &ufh::item::ItemRef, document: Document<'_>) -> Result<(), Self::Error> {
+    async fn upsert(
+        &self,
+        item_ref: &ufh::item::ItemRef,
+        document: Document<'_>,
+    ) -> Result<(), Self::Error> {
         debug!("upsert doc for {}", item_ref);
         let mut writer = self.writer.lock().await;
         let mut doc = TantivyDoc::new();
@@ -70,10 +77,16 @@ impl Search for Tantivy {
         }
         doc.add_text(field("body"), document.text);
         doc.add_text(field("type"), document.event.content.get_type());
-        doc.add_date(field("mtime"), DateTime::from_timestamp_millis(OffsetDateTime::now_utc().unix_timestamp()));
+        doc.add_date(
+            field("mtime"),
+            DateTime::from_timestamp_millis(OffsetDateTime::now_utc().unix_timestamp()),
+        );
         // FIXME: don't do `as i64`
-        doc.add_date(field("btime"), DateTime::from_timestamp_millis(document.event.origin_ts as i64));
+        doc.add_date(
+            field("btime"),
+            DateTime::from_timestamp_millis(document.event.origin_ts as i64),
+        );
 
         let mut product = Vec::new();
         for tag in document.tags {
             let mut parts = Vec::new();
@@ -85,7 +98,7 @@ impl Search for Tantivy {
         for tag in product {
             doc.add_text(field("tag"), tag);
         }
 
         writer.add_document(doc)?;
         writer.commit()?;
         Ok(())
@@ -103,12 +116,23 @@ impl Search for Tantivy {
     }
 
     #[tracing::instrument(skip_all)]
-    async fn search(&self, query: &str, limit: usize, offset: usize, sniplen: usize) -> Result<Vec<SearchResult>, Self::Error> {
+    async fn search(
+        &self,
+        query: &str,
+        limit: usize,
+        offset: usize,
+        sniplen: usize,
+    ) -> Result<Vec<SearchResult>, Self::Error> {
         debug!("search doc for {}", query);
         let searcher = self.reader.searcher();
         let field = |name: &str| self.schema.get_field(name).unwrap();
-        let parser = QueryParser::for_index(searcher.index(), vec![field("ref"), field("name"), field("body")]);
-        let parsed = parser.parse_query(query).map_err(|_| Error::Validation("invalid query"))?;
+        let parser = QueryParser::for_index(
+            searcher.index(),
+            vec![field("ref"), field("name"), field("body")],
+        );
+        let parsed = parser
+            .parse_query(query)
+            .map_err(|_| Error::Validation("invalid query"))?;
         let mut snippets = tantivy::SnippetGenerator::create(&searcher, &parsed, field("body"))?;
         snippets.set_max_num_chars(sniplen);
         let collector = TopDocs::with_limit(limit).and_offset(offset);
@@ -118,15 +142,24 @@ impl Search for Tantivy {
             let snippet = snippets.snippet_from_doc(&doc);
             let snippet = (!snippet.is_empty()).then(|| Snippet {
                 text: snippet.fragment().to_string(),
-                ranges: snippet.highlighted().iter()
+                ranges: snippet
+                    .highlighted()
+                    .iter()
                     .map(|range| (range.start, range.end))
                     .collect(),
             });
-            let item_ref: ItemRef = doc.get_first(field("ref"))
+            let item_ref: ItemRef = doc
+                .get_first(field("ref"))
                 .expect("document doesn't have an item ref!")
-                .as_text().unwrap()
-                .parse().unwrap();
-            docs.push(SearchResult { score, snippet, item_ref });
+                .as_text()
+                .unwrap()
+                .parse()
+                .unwrap();
+            docs.push(SearchResult {
+                score,
+                snippet,
+                item_ref,
+            });
         }
 
         Ok(docs)
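
The search() body also picks up rustfmt's chain rule: when a method chain overflows the line limit, the receiver stays on the first line and every subsequent link gets its own line, as with the doc.get_first(...) chain above. A dependency-free sketch (not from the commit):

fn main() {
    // One link per line once the chain is too long for a single line.
    let shouted = "hello, rustfmt"
        .split(',')
        .map(str::trim)
        .map(str::to_uppercase)
        .collect::<Vec<_>>()
        .join(" / ");
    println!("{shouted}");
}
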
@@ -1,9 +1,13 @@
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
-    #[error("{0}")] Sqlite(sqlx::Error),
-    #[error("{0}")] Migration(sqlx::migrate::MigrateError),
-    #[error("{0}")] Io(std::io::Error),
-    #[error("{0}")] Static(&'static str),
+    #[error("{0}")]
+    Sqlite(sqlx::Error),
+    #[error("{0}")]
+    Migration(sqlx::migrate::MigrateError),
+    #[error("{0}")]
+    Io(std::io::Error),
+    #[error("{0}")]
+    Static(&'static str),
     // #[error("{0}")] Other(Box<dyn std::error::Error>),
 }
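
Here the only change is attribute placement: rustfmt does not leave an attribute on the same line as the enum variant it decorates. A sketch of the same rule using #[allow] instead of thiserror's #[error] so it builds without dependencies (the names are made up, not from the commit):

#[derive(Debug)]
enum Fetched {
    // rustfmt puts each attribute on the line above its variant.
    #[allow(dead_code)]
    Hit(&'static str),
    #[allow(dead_code)]
    Miss,
}

fn main() {
    println!("{:?}", Fetched::Miss);
}
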
@@ -1,10 +1,10 @@
+use crate::error::Error;
 use sha2::Digest;
+use sqlx::{query, sqlite::SqliteConnectOptions, SqlitePool};
 use std::path::PathBuf;
-use ufh::item::{ItemRef, HashType};
 use tokio::fs;
 use tokio::sync::broadcast;
-use sqlx::{SqlitePool, query, sqlite::SqliteConnectOptions};
-use crate::error::Error;
+use ufh::item::{HashType, ItemRef};
 
 pub struct FileStore {
     blob_path: PathBuf,
@@ -36,7 +36,7 @@ impl FileStore {
             stream: broadcast::channel(1).0, // set low for now, to check for bad code
         })
     }
 
     pub async fn put(&self, blob: &[u8]) -> Result<ItemRef, Error> {
         let hash = {
             let mut hasher = sha2::Sha224::default();
@@ -48,10 +48,15 @@ impl FileStore {
         tokio::fs::write(path, blob).await?;
 
         let item_ref_str = item_ref.to_string();
-        query!("INSERT OR IGNORE INTO blobs (hash) VALUES (?)", item_ref_str).execute(&self.ref_db).await?;
+        query!(
+            "INSERT OR IGNORE INTO blobs (hash) VALUES (?)",
+            item_ref_str
+        )
+        .execute(&self.ref_db)
+        .await?;
 
         let _ = self.stream.send(item_ref.clone());
 
         Ok(item_ref)
     }
 
@@ -63,11 +68,18 @@ impl FileStore {
     pub async fn delete(&self, item: &ItemRef) -> Result<(), Error> {
         let file_path = self.blob_path.join(item.to_string());
         let item_str = item.to_string();
-        query!("DELETE FROM blobs WHERE hash = ?", item_str).execute(&self.ref_db).await?;
+        query!("DELETE FROM blobs WHERE hash = ?", item_str)
+            .execute(&self.ref_db)
+            .await?;
         Ok(tokio::fs::remove_file(file_path).await?)
     }
 
-    pub async fn list(&self, after: Option<ItemRef>, limit: usize, timeout: Option<u64>) -> Result<Vec<ItemRef>, Error> {
+    pub async fn list(
+        &self,
+        after: Option<ItemRef>,
+        limit: usize,
+        timeout: Option<u64>,
+    ) -> Result<Vec<ItemRef>, Error> {
         // this code doesn't seem good but works
         use futures_util::TryStreamExt as _;
 
@@ -84,8 +96,8 @@ impl FileStore {
             }
         } else {
             let limit = limit as u32;
-            let mut rows = query!("SELECT hash FROM blobs ORDER BY rowid LIMIT ?", limit)
-                .fetch(&self.ref_db);
+            let mut rows =
+                query!("SELECT hash FROM blobs ORDER BY rowid LIMIT ?", limit).fetch(&self.ref_db);
             while let Ok(Some(row)) = rows.try_next().await {
                 let item_str: String = row.hash;
                 let item_ref: ItemRef = ItemRef::try_from(item_str.as_str()).unwrap();
@@ -96,13 +108,15 @@ impl FileStore {
         if let Some(timeout) = timeout {
             if entries.is_empty() {
                 let timeout = std::time::Duration::from_millis(timeout);
-                if let Ok(result) = tokio::time::timeout(timeout, self.stream.subscribe().recv()).await {
+                if let Ok(result) =
+                    tokio::time::timeout(timeout, self.stream.subscribe().recv()).await
+                {
                     let item = result.map_err(|_| Error::Static("couldnt receive from channel"))?;
                     return Ok(vec![item]);
                 }
             }
         }
 
         Ok(entries)
     }
 }
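
One more recurring rule shows up in list() above: when a let binding's initializer will not fit beside the pattern, rustfmt breaks immediately after the `=` and indents the initializer, which is what produced the two-line `let mut rows =` binding. A sketch under the same assumption (not from the commit):

fn main() {
    // The initializer drops to its own line to stay under one hundred columns.
    let a_rather_long_binding_name_for_a_rather_long_initializer =
        String::from("rustfmt breaks after the equals sign when neither side fits");
    println!("{a_rather_long_binding_name_for_a_rather_long_initializer}");
}
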
@@ -1,12 +1,12 @@
 use axum::extract::{BodyStream, Json, Path, Query, State};
 use axum::http::StatusCode;
 use futures_util::StreamExt;
-use serde_json::{Value, json};
+use serde_json::{json, Value};
 use std::sync::Arc;
 use ufh::item::ItemRef;
 
-mod fs;
 mod error;
+mod fs;
 
 // blob servers are only accessible from index servers anyway, so a max_size isn't really needed
 // const MAX_SIZE: u64 = 1024 * 1024; // 1 MiB
@@ -25,7 +25,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     };
     let router = axum::Router::new()
         .route("/blobs", axum::routing::get(blob_list).post(blob_upload))
-        .route("/blobs/:item_ref", axum::routing::get(blob_download).delete(blob_delete))
+        .route(
+            "/blobs/:item_ref",
+            axum::routing::get(blob_download).delete(blob_delete),
+        )
         .with_state(Arc::new(state));
     axum::Server::bind(&"0.0.0.0:3219".parse().unwrap())
         .serve(router.into_make_service())
@@ -55,10 +58,12 @@ async fn blob_upload(
         chunks.push(chunk);
     }
 
-    let item_ref = state.store
-        .put(&chunks.concat())
-        .await
-        .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, Json(json!({ "error": err.to_string() }))))?;
+    let item_ref = state.store.put(&chunks.concat()).await.map_err(|err| {
+        (
+            StatusCode::INTERNAL_SERVER_ERROR,
+            Json(json!({ "error": err.to_string() })),
+        )
+    })?;
     Ok(Json(json!({ "ref": item_ref })))
 }
 
@@ -66,17 +71,20 @@ async fn blob_download(
     State(state): State<Arc<UfhState>>,
     path: Path<DownloadPath>,
 ) -> Result<Vec<u8>, (StatusCode, Json<Value>)> {
-    state.store
-        .get(&path.item_ref)
-        .await
-        .map_err(|err| (StatusCode::NOT_FOUND, Json(json!({ "error": err.to_string() }))))
+    state.store.get(&path.item_ref).await.map_err(|err| {
+        (
+            StatusCode::NOT_FOUND,
+            Json(json!({ "error": err.to_string() })),
+        )
+    })
 }
 
 async fn blob_delete(
     State(state): State<Arc<UfhState>>,
     path: Path<DownloadPath>,
 ) -> Result<(), Json<Value>> {
-    state.store
+    state
+        .store
         .delete(&path.item_ref)
         .await
         .map_err(|err| Json(json!({ "error": err.to_string() })))
@@ -87,7 +95,8 @@ async fn blob_list(
     Query(query): Query<ListQuery>,
 ) -> Result<Json<Value>, Json<Value>> {
     let limit = query.limit.unwrap_or(20).min(100);
-    let list = state.store
+    let list = state
+        .store
         .list(query.after, limit, query.timeout)
         .await
        .map_err(|err| Json(json!({ "error": err.to_string() })))?;