cleanup and bug hunting

This commit is contained in:
tezlm 2023-08-05 03:30:20 -07:00
parent 67c48e9eec
commit f24e16c02b
Signed by: tezlm
GPG key ID: 649733FCD94AFBBA
41 changed files with 533 additions and 291 deletions

7
Cargo.lock generated
View file

@ -135,6 +135,12 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
[[package]]
name = "ascii_table"
version = "4.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75054ce561491263d7b80dc2f6f6c6f8cdfd0c7a7c17c5cf3b8117829fa72ae1"
[[package]]
name = "async-recursion"
version = "1.0.4"
@ -460,6 +466,7 @@ checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
name = "cli"
version = "0.1.0"
dependencies = [
"ascii_table",
"base64 0.21.2",
"bytes",
"clap",

View file

@ -6,6 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ascii_table = "4.0.2"
base64 = "0.21.2"
bytes = "1.4.0"
clap = { version = "4.3.11", features = ["derive"] }

View file

@ -25,7 +25,7 @@ use ufh::{
item::{HashType, ItemRef},
query::{Query, QueryRelation},
};
use ascii_table::{AsciiTable, Align as AsciiAlign};
use crate::net::PutItem;
pub type Error = Box<dyn std::error::Error>;
@ -277,19 +277,19 @@ async fn main() -> Result<(), Error> {
let is_term = stdout.is_terminal();
for item_ref in refs {
let Item::Event(event) = client.get(&item_ref).await? else {
return Err(StaticError("couldnt build an event"))?;
return Err(StaticError("couldnt build an event").into());
};
let EventContent::File(content) = event.content else {
return Err(StaticError("not a file event"))?;
return Err(StaticError("not a file event").into());
};
for chunk in content.chunks {
let Item::Blob(blob) = client.get(&chunk).await? else {
return Err(StaticError("not a blob"))?;
return Err(StaticError("not a blob").into());
};
if is_term && !force && std::str::from_utf8(&blob).is_err() {
return Err(StaticError(
"refusing to output binary data to tty, use `-f` to force",
))?;
).into());
}
std::io::stdout().write_all(&blob)?;
}

View file

@ -28,6 +28,8 @@ pub enum Error {
RecvError(tokio::sync::broadcast::error::RecvError),
#[error("{0}")]
Tantivy(tantivy::error::TantivyError),
#[error("{0}")]
Axum(axum::Error),
// #[error("{0}")] Unknown(Box<dyn std::error::Error>),
}
@ -86,6 +88,12 @@ impl From<tantivy::error::TantivyError> for Error {
}
}
impl From<axum::Error> for Error {
    /// Wrap a low-level axum error (e.g. a websocket send/recv failure)
    /// into the crate-wide error type so it can propagate with `?`.
    fn from(err: axum::Error) -> Self {
        Self::Axum(err)
    }
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
let status = self.status();
@ -115,6 +123,7 @@ impl Error {
Error::RecvError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::Tantivy(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::BadRequest(_) => StatusCode::BAD_REQUEST,
Error::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::Http(status, _) => *status,
}
}

View file

@ -167,30 +167,23 @@ pub async fn commit_special(me: &Items, action: &DelayedAction) -> Result<(), Er
DelayedAction::Redact(refs) => {
// TODO: garbage collect unreferenced blobs
for item_ref in refs {
let mutex = me.mutex_for(item_ref).await;
let lock = mutex.lock().await;
me.db.event_redact(item_ref).await?;
me.search.delete(item_ref).await?;
drop(lock);
}
}
DelayedAction::Tag(refs, tags) => {
for item_ref in refs {
let mutex = me.mutex_for(item_ref).await;
let lock = mutex.lock().await;
me.db.tags_set(&[item_ref.clone()], tags).await?;
drop(lock);
}
}
DelayedAction::Edit(refs, content) => {
for item_ref in refs {
let mutex = me.mutex_for(item_ref).await;
let lock = mutex.lock().await;
let event = me.db.event_fetch(item_ref).await?.expect("can't edit nonexistent event");
me.db
.derived_put(item_ref, "update", content.clone())
.await?;
drop(lock);
// FIXME (investigate): potential race condition with update + redact
me.finish_event_create(crate::items::WipCreate {
item_ref: item_ref.clone(),

View file

@ -14,7 +14,7 @@ use bytes::Bytes;
use events::DelayedAction;
use lru::LruCache;
use once_cell::sync::OnceCell;
use std::{collections::HashMap, num::NonZeroUsize, sync::Arc};
use std::num::NonZeroUsize;
use tokio::sync::Mutex;
use tracing::{debug, info};
use ufh::{
@ -51,7 +51,6 @@ pub struct Items {
db: Sqlite,
search: Tantivy,
blobs: blobs::Client,
partials: Mutex<HashMap<ItemRef, Arc<Mutex<()>>>>,
}
impl Items {
@ -60,14 +59,12 @@ impl Items {
db: db.clone(),
blobs: blobs.clone(),
search: search.clone(),
partials: Mutex::new(HashMap::new()),
}
}
// unsure whether to return rowid (and relations) here...
pub async fn begin_event_create(&self, wip: WipEvent) -> Result<WipCreate, Error> {
debug!("begin new create");
dbg!(wip.to_json());
if !wip.has_valid_signature() {
return Err(Error::Validation("missing or invalid signature"));
@ -104,9 +101,6 @@ impl Items {
let rowid = self.db.event_create(&event).await?;
debug!("created event (rowid={})", rowid);
let mut partials = self.partials.lock().await;
partials.insert(event.id.clone(), Arc::new(Mutex::new(())));
Ok(WipCreate {
item_ref: event.id.clone(),
create: Create {
@ -124,8 +118,6 @@ impl Items {
pub async fn finish_event_create(&self, wip: WipCreate) -> Result<Create, Error> {
let create = wip.create;
let event = create.event;
let mutex = self.mutex_for(&wip.item_ref).await;
let lock = mutex.lock().await;
events::commit_special(self, &wip.action).await?;
@ -160,8 +152,6 @@ impl Items {
update_search_index(self, &event, &relations).await?;
drop(lock);
Ok(Create {
event,
relations: create.relations,
@ -169,13 +159,6 @@ impl Items {
})
}
async fn mutex_for(&self, item_ref: &ItemRef) -> Arc<Mutex<()>> {
let mut map = self.partials.lock().await;
map.entry(item_ref.clone())
.or_insert_with(|| Arc::new(Mutex::new(())))
.clone()
}
#[async_recursion::async_recursion]
pub async fn create_event(&self, wip: WipEvent) -> Result<Create, Error> {
let wip = self.begin_event_create(wip).await?;

View file

@ -1,5 +1,9 @@
#![feature(async_fn_in_trait)] // ahh yes, experimental features
#![allow(
clippy::type_complexity
)]
// general purpose lints
#![warn(
clippy::get_unwrap,
@ -100,7 +104,7 @@ async fn serve(port: u16) -> Result<(), Error> {
search,
items: items_client,
queries: RwLock::new(HashMap::new()),
events: tokio::sync::broadcast::channel(1).0, // set low for now, to check for bad code
events: tokio::sync::broadcast::channel(64).0,
p2p: Mutex::new(p2p),
};

View file

@ -5,7 +5,7 @@ use std::collections::VecDeque;
use crate::{
items::events::get_relations,
routes::things::Error,
state::db::{sqlite::Sqlite, Database, DbItem},
state::db::{sqlite::Sqlite, Database, DbItem, Location},
Relations,
};
use tracing::trace;
@ -172,10 +172,10 @@ async fn is_relation_valid(
async fn get_acl(db: &Sqlite, item_ref: &ItemRef) -> Option<Acl> {
let relation = QueryRelation::from_rel("x.acl".into(), "acl".into());
let result = db
.query_relations(&[relation], &[item_ref.clone()])
.query_relations(&[relation], &[item_ref.clone()], Location::Reverse, 1)
.await
.ok()?;
let (_, event) = result.into_iter().last()?;
let (_, event) = result.0.into_iter().next()?;
match event.content {
EventContent::Acl(acl) => Some(acl),
_ => None,

View file

@ -75,11 +75,15 @@ pub async fn route(
let wip: WipEvent = serde_json::from_slice(&blob)?;
let wip = state.items.begin_event_create(wip).await?;
// FIXME: the state.events channel can become out of order when ?wait=false
// then again, it can get out of order when multiple people upload. this is critical to fix
#[cfg(off)]
if !params.wait {
let item_ref = wip.item_ref.clone();
tokio::spawn(async move {
let Ok(create) = state.items.finish_event_create(wip).await else {
return error!("failed to finish creating event");
let create = match state.items.finish_event_create(wip).await {
Ok(create) => create,
Err(err) => return error!("failed to finish creating event: {}", err),
};
let _ = state
.events

View file

@ -1,11 +1,12 @@
use crate::items::events::get_relations;
use crate::perms::can_view_event;
use crate::state::db::Database;
use crate::state::db::{Database, Location};
use crate::{Relations, ServerState};
use axum::extract::{Query, State};
use axum::extract::{Query as AxumQuery, State};
use axum::response::IntoResponse;
use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
use ufh::query::Query;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
@ -20,7 +21,7 @@ use super::Error;
#[derive(Debug, Deserialize)]
pub struct QueryParams {
limit: Option<u32>,
after: Option<u32>,
after: Option<String>,
timeout: Option<u64>,
query: String,
}
@ -32,6 +33,7 @@ pub struct QueryResult {
relations: Option<HashMap<ItemRef, Event>>,
#[serde(skip_serializing_if = "Option::is_none")]
next: Option<String>,
_debug: &'static str,
}
impl IntoResponse for QueryResult {
@ -46,7 +48,7 @@ impl IntoResponse for QueryResult {
pub async fn route(
State(state): State<Arc<ServerState>>,
auth: Authenticate<perms::ReadOnly>,
Query(params): Query<QueryParams>,
AxumQuery(params): AxumQuery<QueryParams>,
) -> Result<QueryResult, Error> {
tracing::Span::current().record("query", &params.query);
debug!("enumerate");
@ -61,17 +63,33 @@ pub async fn route(
debug!("query with {:?}", query);
let limit = params.limit.unwrap_or(50).clamp(0, 100);
let (events_after, rels_after) = params
.after
.as_deref()
.map(parse_after)
.transpose()?
.unwrap_or((Location::Beginning, Location::Beginning));
fn parse_after(after: &str) -> Result<(Location, Location), Error> {
let Some((event, rels)) = after.split_once('.') else {
return Err(Error::Validation("invalid after"));
};
let parse = |s: &str| s.parse().map_err(|_| Error::Validation("invalid after"));
Ok((
Location::Index(parse(event)?),
Location::Index(parse(rels)?),
))
}
// a bit of a mess, should figure out how to do this better
let result = state
.db
.query_events(&query, params.limit, params.after.map(|p| p.to_string()))
.await?;
let has_events = !result.events.is_empty();
let result_og = state.db.query_events(&query, events_after, limit).await?;
let has_events = !result_og.events.is_empty();
use futures_util::stream::{self, StreamExt};
let result = crate::state::db::QueryResult {
events: stream::iter(result.events)
events: stream::iter(result_og.events.clone())
.filter_map(|event| async {
let visible = can_view_event(&state.db, &event, &user).await;
trace!("check if event matches (visible = {})", visible);
@ -79,23 +97,50 @@ pub async fn route(
})
.collect()
.await,
..result
..result_og.clone()
};
if result.events.is_empty() && has_events {
// TODO (performance): paginate server side
return Ok(QueryResult {
events: result.events,
relations: None,
next: result.next,
});
// return early if skipping
if result.events.is_empty() && !has_events {
if let Some(next) = result.next {
return Ok(QueryResult {
events: result.events,
relations: None,
next: Some(format!("{}.{}", next, 0)),
_debug: "skipping",
});
}
}
// FIXME: send newly visible events and remove newly hidden events
// this could be hard to do! events in the past could be made visible,
// which messes with rowid. i'd need to either restructure long
// polling to work or switch to websockets
let result = match (params.timeout, result.events.is_empty()) {
// relations query
// TODO (perf, maybe): this can be skipped if we're skipping over
// invisible events, but it would be better to fully skip over invisible
// events than add handling for that case?
let (relations, at_end, rel_rowid) = if !query.relations.is_empty() && !result.events.is_empty() {
let relations: Vec<_> = query.relations.iter().cloned().collect();
let ids: Vec<_> = result.events.iter().map(|ev| ev.id.clone()).collect();
let (map, end_rowid) = state
.db
.query_relations(&relations, &ids, rels_after, limit)
.await?;
debug!("fetched {} relations", map.len());
if let Some(end_rowid) = end_rowid {
let start_rowid = location_idx(&rels_after);
dbg!(start_rowid, end_rowid, limit);
let at_end = end_rowid - start_rowid < limit;
(Some(map), at_end, Some(end_rowid))
} else {
(Some(map), true, end_rowid)
}
} else {
(None, true, Some(location_idx(&rels_after)))
};
let is_at_relation_tail = relations.is_none() && at_end;
let is_at_events_tail = result.events.is_empty();
let is_pollable = is_at_events_tail && is_at_relation_tail;
let result = match (params.timeout, is_pollable) {
(Some(timeout), true) => {
debug!("no events, waiting for new ones...");
@ -103,19 +148,20 @@ pub async fn route(
#[async_recursion::async_recursion]
async fn check_relations(
state: &ServerState,
query: &ufh::query::Query,
query: &Query,
relations: &Relations,
) -> Result<bool, Error> {
for (rel, _) in relations.values() {
let rel_relations = get_relations(&state.db, rel).await?;
trace!("received non-matching event");
dbg!(&rel, &rel_relations, &query);
match query.matches(rel, &rel_relations) {
MatchType::Event => {
unreachable!("matching events should already be handled")
// don't allow event "backwash"
return Ok(false);
}
MatchType::Relation => return Ok(true),
MatchType::None => {
dbg!("check relations: ", &query, &rel, &rel_relations);
match check_relations(state, query, &rel_relations).await? {
true => return Ok(true),
false => (),
@ -154,21 +200,23 @@ pub async fn route(
let timeout = Duration::from_millis(timeout);
match tokio::time::timeout(timeout, next_event).await {
Ok(Ok(result)) => match result {
Ok(Ok(res)) => match res {
(MatchType::Event, event, rowid) => {
debug!("poller received event {:?}", event);
debug!("poller received event {:?} (rowid={})", event, rowid);
return Ok(QueryResult {
events: vec![event],
relations: None,
next: Some(rowid.to_string()),
next: Some(format!("{}.{}", rowid, rowid)),
_debug: "from poller",
});
}
(MatchType::Relation, event, rowid) => {
debug!("poller received relation {:?}", event);
debug!("poller received relation {:?} (rowid={})", event, rowid);
return Ok(QueryResult {
events: Vec::new(),
relations: Some(HashMap::from([(event.id.clone(), event)])),
next: Some(rowid.to_string()),
next: Some(format!("{}.{}", rowid, rowid)),
_debug: "from poller",
});
}
(MatchType::None, _, _) => unreachable!("handled by next_event(...)"),
@ -183,24 +231,29 @@ pub async fn route(
_ => result,
};
// relations query
let relations = if !query.relations.is_empty() && !result.events.is_empty() {
let relations: Vec<_> = query.relations.into_iter().collect();
let ids: Vec<_> = result.events.iter().map(|ev| ev.id.clone()).collect();
let map = state.db.query_relations(&relations, &ids).await?;
debug!("fetched {} relations", map.len());
Some(map)
} else {
None
let next = match (result.next, rel_rowid, at_end) {
(Some(event_after), _, true) => Some(format!("{}.{}", event_after, 0)),
(None, _, true) => None,
(_, Some(rel_after), false) => Some(format!("{}.{}", location_idx(&events_after), rel_after)),
(_, None, false) => unimplemented!(),
};
Ok(QueryResult {
events: result.events,
relations,
next: result.next,
next,
_debug: "from db",
})
}
pub async fn head() -> StatusCode {
StatusCode::OK
}
/// Extract the numeric rowid cursor carried by a `Location`.
/// `Beginning` maps to 0; `Reverse` carries no single index and is
/// unsupported here (panics via `unimplemented!`, as in the original).
fn location_idx(loc: &Location) -> u32 {
    match *loc {
        Location::Index(idx) => idx,
        Location::Beginning => 0,
        Location::Reverse => unimplemented!(),
    }
}

View file

@ -13,6 +13,7 @@ mod enumerate;
pub mod fetch;
mod query;
pub mod thumbnail;
mod watch;
type Response<T> = Result<T, Error>;
@ -24,6 +25,7 @@ pub fn routes() -> Router<Arc<ServerState>> {
.get(enumerate::route)
.head(enumerate::head),
)
.route("/sync", routing::post(watch::route))
.route("/check", routing::post(check::route))
.route("/query", routing::post(query::route))
.route("/:item_ref", routing::get(fetch::route))

View file

@ -2,20 +2,29 @@ use crate::ServerState;
use axum::extract::{Json, State};
use nanoid::nanoid;
use reqwest::StatusCode;
use serde_json::{json, Value};
use std::sync::Arc;
use ufh::query;
use serde::{Serialize, Deserialize};
use crate::routes::util::{perms, Authenticate};
use super::Response;
/// Body returned when a query is registered via POST /query.
#[derive(Debug, Serialize, Deserialize)]
pub struct QueryResponse {
    /// Opaque id the query was stored under; clients pass it back in
    /// `/sync` watch requests to start receiving matches.
    query: String,
    /// Pagination cursor; currently always empty for a newly created query.
    after: String,
}
pub async fn route(
State(state): State<Arc<ServerState>>,
_auth: Authenticate<perms::ReadOnly>,
Json(query): Json<query::Query>,
) -> Response<(StatusCode, Json<Value>)> {
) -> Response<(StatusCode, Json<QueryResponse>)> {
let id = nanoid!();
state.queries.write().await.insert(id.clone(), query);
Ok((StatusCode::CREATED, Json(json!({ "query": id }))))
Ok((StatusCode::CREATED, Json(QueryResponse {
query: id,
after: String::new(),
})))
}

View file

@ -29,14 +29,14 @@ pub async fn route(
);
let content_type = TypedHeader(ContentType::png());
match thumb {
Thumbnail::Raw(bytes) => {
Some(Thumbnail::Raw(bytes)) => {
debug!("generate thumbnail");
let thumb = generate_thumb(&bytes, &size)?;
state.db.thumbnail_put(&item_ref, &size, &thumb).await?;
Ok((content_type, cache_control, thumb))
}
Thumbnail::Some(thumb) => Ok((content_type, cache_control, thumb)),
Thumbnail::None => {
Some(Thumbnail::Some(thumb)) => Ok((content_type, cache_control, thumb)),
None => {
let event = state
.db
.event_fetch(&item_ref)

View file

@ -0,0 +1,185 @@
use crate::items::events::get_relations;
use crate::perms::can_view_event;
use crate::routes::util::{perms, Authenticate};
use crate::Error;
use crate::{Relations, ServerState};
use axum::extract::ws::Message;
use axum::extract::{
ws::{WebSocket, WebSocketUpgrade},
State,
};
use axum::response::IntoResponse;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use tracing::{debug, trace};
use ufh::actor::ActorId;
use ufh::query::Query;
use ufh::{event::Event, query::MatchType};
/// Client → server messages on the /sync websocket.
/// Serialized as `{"type": "hello" | "watch" | "unwatch", ...}`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Request {
    /// Authenticate the session; must be the first message sent.
    Hello { token: String },
    /// Start streaming matches for a previously registered query id.
    Watch { query: String },
    /// Stop streaming matches for a query id.
    Unwatch { query: String },
}
/// Server → client messages on the /sync websocket.
/// Serialized as `{"type": "event" | "relation", ...}`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Response {
    /// An event that directly matched the named query.
    Event { query: String, event: Event },
    /// An event that matched the named query via its relations.
    Relation { query: String, event: Event },
}
// TODO: instead of /query/:query_id, be able to use one socket with multiple queries
/// Upgrade the connection to a websocket and run the sync session.
///
/// If `sync` ends with an error, a best-effort JSON `{"error": "..."}`
/// message is sent to the client before the socket is dropped; send
/// failures at that point are deliberately ignored.
#[tracing::instrument(skip_all, fields(query))]
pub async fn route(
    State(state): State<Arc<ServerState>>,
    ws: WebSocketUpgrade,
) -> impl IntoResponse {
    ws.on_upgrade(|mut ws| async move {
        if let Err(error) = sync(state, &mut ws).await {
            use serde_json::{json, to_string};
            // serializing a json!({"error": String}) value cannot fail
            let msg = to_string(&json!({ "error": error.to_string() })).unwrap();
            let _ = ws.send(Message::Text(msg)).await;
        }
        drop(ws);
    })
}
/// Drive one /sync websocket session.
///
/// Protocol: the client must first send `Request::Hello { token }` to
/// authenticate; afterwards it can `Watch`/`Unwatch` query ids. Broadcast
/// events from `state.events` are matched against every watched query and
/// forwarded as `Response::Event` / `Response::Relation` messages.
///
/// Returns `Ok(())` on a clean close (client disconnect or idle timeout);
/// any protocol, auth, channel, or websocket error ends the session with `Err`.
async fn sync(state: Arc<ServerState>, ws: &mut WebSocket) -> Result<(), Error> {
    // Internal unification of the three select! sources.
    enum Msg {
        Noop,
        Close,
        Client(Request),
        Server((Event, Relations, u32)),
    }
    // Set once a successful Hello is processed; gates all other handling.
    let mut user: Option<ActorId> = None;
    // query id -> query currently being watched by this session.
    let mut watching: HashMap<String, Query> = HashMap::new();
    let mut events_rx = state.events.subscribe();
    loop {
        let msg = tokio::select! {
            // Idle timeout: the sleep future is recreated on every loop pass,
            // so the session closes after 60s with no websocket or broadcast
            // activity at all.
            _ = tokio::time::sleep(Duration::from_secs(60)) => Msg::Close,
            message = ws.recv() => match message {
                Some(Ok(message)) => match message {
                    Message::Text(string) => Msg::Client(serde_json::from_str(&string)?),
                    Message::Binary(_) => return Err(Error::Validation("can't handle binary")),
                    // ping/pong/close frames carry no protocol payload
                    _ => Msg::Noop,
                }
                Some(Err(err)) => return Err(err.into()),
                // None = client disconnected
                None => Msg::Close,
            },
            // broadcast recv errors (e.g. lagged) end the session via `?`
            message = events_rx.recv() => Msg::Server(message?),
        };
        match msg {
            Msg::Noop => (),
            Msg::Close => break,
            Msg::Client(req) => match &user {
                // Not yet authenticated: only Hello is legal.
                None => match req {
                    Request::Hello { token } => {
                        match Authenticate::<perms::ReadOnly>::from_token(state.db.pool(), &token)
                            .await
                        {
                            Ok(auth) => match auth.user {
                                Some(u) => user = Some(u),
                                // NOTE(review): placeholder error message ("a") —
                                // probably deserves a real description
                                None => return Err(Error::Validation("a")),
                            },
                            Err((status, err)) => return Err(Error::Http(status, err)),
                        }
                    }
                    _ => {
                        return Err(Error::Validation(
                            "not authenticated, you must send a hello message first",
                        ))
                    }
                },
                // Already authenticated: Hello is now illegal, watch/unwatch allowed.
                Some(_) => match req {
                    Request::Hello { .. } => {
                        return Err(Error::Validation("you have already authenticated"));
                    }
                    Request::Watch { query: query_id } => {
                        // Look up a query previously registered via POST /query.
                        let queries = state.queries.read().await;
                        let query = queries
                            .get(&query_id)
                            .ok_or_else(|| Error::Validation("query doesn't exist"))?
                            .clone();
                        watching.insert(query_id, query);
                        drop(queries);
                    }
                    Request::Unwatch { query: query_id } => {
                        watching.remove(&query_id);
                    }
                },
            },
            Msg::Server((event, relations, _)) => {
                // Silently drop broadcast events until the client authenticates.
                let Some(user) = &user else {
                    continue;
                };
                // First watched query that matches wins; at most one message
                // is sent per broadcast event.
                let mut result: Option<(MatchType, String)> = None;
                for (query_id, query) in &watching {
                    let match_type = if can_view_event(&state.db, &event, user).await {
                        match query.matches(&event, &relations) {
                            mt @ MatchType::Event => mt,
                            mt @ MatchType::Relation => mt,
                            MatchType::None => {
                                debug!("received non-matching event");
                                // No direct match: walk the relation graph to see
                                // if the event is relevant as a relation.
                                if check_relations(&state, query, &relations).await? {
                                    MatchType::Relation
                                } else {
                                    MatchType::None
                                }
                            }
                        }
                    } else {
                        // Not visible to this user: treat as non-matching.
                        MatchType::None
                    };
                    if let mt @ (MatchType::Event | MatchType::Relation) = match_type {
                        result = Some((mt, query_id.clone()));
                        break;
                    };
                }
                let response = match result {
                    Some((MatchType::Event, query)) => Response::Event { event, query },
                    Some((MatchType::Relation, query)) => Response::Relation { event, query },
                    // nothing matched: wait for the next broadcast
                    _ => continue,
                };
                ws.send(Message::Text(serde_json::to_string(&response)?))
                    .await?;
            }
        }
    }
    Ok(())
}
// TODO (performance): reduce database queries (reuse the recursive sql query?)
/// Walk `relations` transitively and return `true` if any event reachable
/// through the relation graph matches `query` as a relation.
///
/// An event discovered during this walk can itself match the query as a
/// top-level event ("backwash"): delivering it here would hand the client an
/// event out of order. The long-polling query route handles that case by
/// treating it as a non-match instead of panicking; this copy now does the
/// same (previously it hit `unreachable!`, which could take the whole
/// websocket session down).
#[async_recursion::async_recursion]
async fn check_relations(
    state: &ServerState,
    query: &Query,
    relations: &Relations,
) -> Result<bool, Error> {
    for (rel, _) in relations.values() {
        // Fetch the relations of the related event so matching can recurse.
        let rel_relations = get_relations(&state.db, rel).await?;
        trace!("received non-matching event");
        match query.matches(rel, &rel_relations) {
            MatchType::Event => {
                // don't allow event "backwash"
                return Ok(false);
            }
            MatchType::Relation => return Ok(true),
            MatchType::None => match check_relations(state, query, &rel_relations).await? {
                true => return Ok(true),
                false => (),
            },
        }
    }
    Ok(false)
}

View file

@ -15,7 +15,7 @@ use crate::routes::things::thumbnail::ThumbnailSize;
pub mod sqlite;
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct QueryResult {
pub events: Vec<Event>,
pub next: Option<String>,
@ -39,7 +39,13 @@ pub enum DbItem {
pub enum Thumbnail {
Raw(Bytes),
Some(Bytes),
None,
}
/// Pagination cursor for database queries.
#[derive(Debug, Clone, Copy)]
pub enum Location {
    /// Start from the first row (no cursor yet).
    Beginning,
    /// Iterate in descending rowid order (newest first); not supported by
    /// every query path — see `query_relations` in the sqlite backend.
    Reverse,
    /// Resume strictly after this rowid (exclusive — queries use `rowid >`).
    Index(u32),
}
#[rustfmt::skip]
@ -59,8 +65,9 @@ pub trait Database {
// query
async fn event_fetch(&self, item_ref: &ItemRef) -> Result<Option<Event>, Self::Error>;
async fn query_events(&self, query: &Query, limit: Option<u32>, after: Option<String>) -> Result<QueryResult, Self::Error>;
async fn query_relations( &self, relations: &[QueryRelation], for_events: &[ItemRef]) -> Result<HashMap<ItemRef, Event>, Self::Error>;
async fn query_events(&self, query: &Query, after: Location, limit: u32) -> Result<QueryResult, Self::Error>;
// return type is currently a bit of a kludge
async fn query_relations(&self, relations: &[QueryRelation], for_events: &[ItemRef], after: Location, limit: u32) -> Result<(HashMap<ItemRef, Event>, Option<u32>), Self::Error>;
async fn bulk_fetch(&self, item_refs: &[ItemRef], partial: bool) -> Result<HashMap<ItemRef, DbItem>, Self::Error>;
// routes::things::create has a lot of file-specific things
@ -68,7 +75,7 @@ pub trait Database {
// thumbnails
async fn thumbnail_put(&self, item_ref: &ItemRef, size: &ThumbnailSize, bytes: &[u8]) -> Result<(), Self::Error>;
async fn thumbnail_get(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<Thumbnail, Self::Error>;
async fn thumbnail_get(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<Option<Thumbnail>, Self::Error>;
async fn thumbnail_del(&self, item_ref: &ItemRef, size: &ThumbnailSize) -> Result<(), Self::Error>;
// shares

View file

@ -3,25 +3,29 @@
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use std::sync::Arc;
use bytes::Bytes;
use futures_util::TryStreamExt;
use serde_json::Value;
use sqlx::query as sql;
use sqlx::{sqlite::SqliteConnectOptions, QueryBuilder, Row, SqlitePool};
use tokio::sync::RwLock;
use tracing::{debug, trace};
use ufh::derived::Derived;
use ufh::event::EventContent;
use ufh::query::QueryRelation;
use ufh::{item::ItemRef, query};
use super::{Database, DbItem, Thumbnail};
use super::{Database, DbItem, Thumbnail, Location};
use crate::routes::things::thumbnail::ThumbnailSize;
use crate::{Error, Event};
#[derive(Debug, Clone)]
pub struct Sqlite {
pool: sqlx::SqlitePool,
// yes, this ruins concurrency. no, i don't feel like dealing with this properly
// in the future i'd tell everyone to use postgres
lock: Arc<RwLock<()>>,
}
/*
@ -39,7 +43,7 @@ impl Sqlite {
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal);
let pool = SqlitePool::connect_with(options).await?;
sqlx::migrate!().run(&pool).await?;
Ok(Sqlite { pool })
Ok(Sqlite { pool, lock: Arc::new(RwLock::new(())) })
}
// leaky abstraction to be removed
@ -55,6 +59,7 @@ impl Database for Sqlite {
#[tracing::instrument(skip_all)]
async fn event_create(&self, event: &Event) -> Result<u32, Self::Error> {
let _lock = self.lock.write().await;
// stringify things here because sqlx is stupid and wont let me use temporary bindings
let item_type_str = event.content.get_type();
let item_ref_str = event.id.to_string();
@ -94,6 +99,7 @@ impl Database for Sqlite {
#[tracing::instrument(skip_all)]
async fn create_blob(&self, item_ref: &ItemRef, size: u32) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let item_ref_str = item_ref.to_string();
// TODO: garbage collect blobs with no references
@ -124,6 +130,7 @@ impl Database for Sqlite {
#[tracing::instrument(skip_all)]
async fn event_redact(&self, item_ref: &ItemRef) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let item_ref_str = item_ref.to_string();
let mut tx = self.pool.begin().await?;
let row = sql!("SELECT json FROM events WHERE ref = ?", item_ref_str)
@ -150,6 +157,7 @@ impl Database for Sqlite {
#[tracing::instrument(skip_all)]
async fn event_fetch(&self, item_ref: &ItemRef) -> Result<Option<Event>, Error> {
let _lock = self.lock.read().await;
let item_ref_str = item_ref.to_string();
let result = sql!(
"
@ -181,9 +189,10 @@ impl Database for Sqlite {
async fn query_events(
&self,
query: &query::Query,
limit: Option<u32>,
after: Option<String>,
after: Location,
limit: u32,
) -> Result<super::QueryResult, Error> {
let _lock = self.lock.read().await;
// this code is getting worse as time goes on
let mut builder = QueryBuilder::new("
SELECT DISTINCT events.rowid, events.ref AS item_ref, events.json, sender, derived.json as derived FROM events
@ -193,9 +202,6 @@ impl Database for Sqlite {
builder.push(" JOIN tags ON tags.ref = events.ref");
}
builder.push(" WHERE 1 = 1"); // ugly hack to make composing conditions easier
if let Some(after) = after {
builder.push(" AND events.rowid > ").push_bind(after);
}
if let Some(refs) = &query.refs {
let mut sep = builder.push(" AND item_ref IN (").separated(',');
for item_ref in refs {
@ -224,10 +230,12 @@ impl Database for Sqlite {
}
builder.push(")");
}
let limit = limit.unwrap_or(50).clamp(0, 100);
builder
.push(" ORDER BY events.rowid LIMIT ")
.push_bind(limit);
match after {
Location::Beginning => builder.push(" ORDER BY events.rowid LIMIT ").push_bind(limit),
Location::Index(rowid) => builder.push(" AND events.rowid > ").push_bind(rowid).push(" ORDER BY events.rowid LIMIT ").push_bind(limit),
Location::Reverse => builder.push(" ORDER BY events.rowid DESC LIMIT ").push_bind(limit),
};
debug!("generated sql: {}", builder.sql());
@ -260,7 +268,10 @@ impl Database for Sqlite {
&self,
relations: &[QueryRelation],
for_events: &[ItemRef],
) -> Result<HashMap<ItemRef, Event>, Self::Error> {
after: Location,
limit: u32,
) -> Result<(HashMap<ItemRef, Event>, Option<u32>), Self::Error> {
let _lock = self.lock.read().await;
let mut builder = sqlx::QueryBuilder::new(
"
WITH RECURSIVE graph(ref_from, ref_to, rel_type) AS (
@ -278,7 +289,7 @@ impl Database for Sqlite {
SELECT relations.ref_from, relations.ref_to, relations.rel_type FROM relations
JOIN graph ON graph.ref_from = relations.ref_to
)
SELECT graph.ref_from AS item_ref, events_from.json, derived.json AS derived
SELECT events_from.rowid, graph.ref_from AS item_ref, events_from.json, derived.json AS derived
FROM graph
JOIN events AS events_from ON events_from.ref = graph.ref_from
JOIN events AS events_to ON events_to.ref = graph.ref_to
@ -308,8 +319,15 @@ impl Database for Sqlite {
.push_bind(tup.2.clone());
});
}
match after {
Location::Beginning => builder.push(" ORDER BY events_from.rowid"),
Location::Index(idx) => builder.push(" AND events_from.rowid > ").push_bind(idx).push(" ORDER BY events_from.rowid "),
Location::Reverse => unimplemented!("cannot currently get last relations"),
};
builder.push(" LIMIT ").push_bind(limit);
debug!("generated sql: {}", builder.sql());
let mut map = HashMap::new();
let mut last_rowid = None;
for row in builder.build().fetch_all(&self.pool).await? {
let item_ref_str: &str = row.get("item_ref");
let item_ref = ItemRef::try_from(item_ref_str)?;
@ -320,9 +338,10 @@ impl Database for Sqlite {
.transpose()?
.unwrap_or_default();
map.insert(item_ref, Event { derived, ..event });
last_rowid = Some(row.get("rowid"));
}
debug!("fetched {} relations", map.len());
Ok(map)
Ok((map, last_rowid))
}
async fn bulk_fetch(
@ -330,6 +349,7 @@ impl Database for Sqlite {
item_refs: &[ItemRef],
partial: bool,
) -> Result<HashMap<ItemRef, DbItem>, Self::Error> {
let _lock = self.lock.read().await;
let mut builder = sqlx::QueryBuilder::new(
"
SELECT refs.ref AS item_ref, events.json, blobs.size, derived.json AS derived FROM refs
@ -372,6 +392,7 @@ impl Database for Sqlite {
}
async fn tags_set(&self, item_refs: &[ItemRef], tags: &[String]) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let mut product = Vec::new();
for tag in tags {
let mut parts = Vec::new();
@ -415,6 +436,7 @@ impl Database for Sqlite {
key: &str,
value: Value,
) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let item_ref_str = item_ref.to_string();
let mut tx = self.pool.begin().await?;
@ -462,6 +484,7 @@ impl Database for Sqlite {
size: &ThumbnailSize,
bytes: &[u8],
) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let item_ref_str = item_ref.to_string();
let size_str = size.to_string();
sql!(
@ -479,7 +502,8 @@ impl Database for Sqlite {
&self,
item_ref: &ItemRef,
size: &ThumbnailSize,
) -> Result<Thumbnail, Self::Error> {
) -> Result<Option<Thumbnail>, Self::Error> {
let _lock = self.lock.read().await;
let item_ref_str = item_ref.to_string();
let size_str = size.to_string();
let result = sql!(
@ -490,9 +514,9 @@ impl Database for Sqlite {
.fetch_one(&self.pool)
.await;
match result {
Ok(row) if row.size == "raw" => Ok(Thumbnail::Raw(Bytes::from(row.blob))),
Ok(row) => Ok(Thumbnail::Some(Bytes::from(row.blob))),
Err(sqlx::error::Error::RowNotFound) => Ok(Thumbnail::None),
Ok(row) if row.size == "raw" => Ok(Some(Thumbnail::Raw(Bytes::from(row.blob)))),
Ok(row) => Ok(Some(Thumbnail::Some(Bytes::from(row.blob)))),
Err(sqlx::error::Error::RowNotFound) => Ok(None),
Err(err) => Err(err.into()),
}
}
@ -502,6 +526,7 @@ impl Database for Sqlite {
item_ref: &ItemRef,
size: &ThumbnailSize,
) -> Result<(), Self::Error> {
let _lock = self.lock.write().await;
let item_ref_str = item_ref.to_string();
let size_str = size.to_string();
sql!(

View file

@ -97,7 +97,7 @@
}
}
navigator.mediaSession?.setActionHandler("stop", () => events.emit("close", "statusMusic"));
navigator.mediaSession?.setActionHandler("stop", events.curry("statusMusic/close"));
onMount(updateScene);
onDestroy(() => stop());
@ -147,12 +147,12 @@
</div>
{#each $state.popups as popup}
{#if popup.type === "event"}
<EventPopup event={popup.event} paginate={popup.paginate} close={() => events.emit("close", "popup")}/>
<EventPopup event={popup.event} paginate={popup.paginate} close={events.curry("popup/close")}/>
{:else if popup.type === "text"}
<Popup>
{popup.text}
<div style="margin-top: 1em; text-align: right">
<button on:click={() => events.emit("close", "popup")}>alright</button>
<button on:click={events.curry("popup/close")}>alright</button>
</div>
</Popup>
{:else if popup.type === "edit"}
@ -163,8 +163,8 @@
<Popup on:close={() => popup.resolve(false)}>
<div>{popup.question}</div>
<div style="margin-top: 1em; text-align: right">
<button on:click={() => { events.emit("close", "popup"); popup.resolve(false); }}>cancel</button>
<button on:click={() => { events.emit("close", "popup"); popup.resolve(true); }}>confirm</button>
<button on:click={() => { events.do("popup/close"); popup.resolve(false); }}>cancel</button>
<button on:click={() => { events.do("popup/close"); popup.resolve(true); }}>confirm</button>
</div>
</Popup>
{:else}

View file

@ -1,5 +1,5 @@
<script lang="ts">
import { events } from "./state";
import { state } from "./state";
import SettingsIc from "carbon-icons-svelte/lib/Settings.svelte";
import HomeIc from "carbon-icons-svelte/lib/Home.svelte";
import ForumIc from "carbon-icons-svelte/lib/Forum.svelte";
@ -30,17 +30,17 @@
const openContext = (item) => (e) => {
console.log(item);
const items = [
{ type: "item", text: "edit", clicked: () => events.emit("popup", { type: "edit", event: item }) },
{ type: "item", text: "foo", clicked: () => events.emit("popup", { type: "text", text: "hello!" }) },
{ type: "item", text: "bar", clicked: () => events.emit("popup", { type: "text", text: "hello!" }) },
{ type: "item", text: "baz", clicked: () => events.emit("popup", { type: "text", text: "hello!" }) },
{ type: "item", text: "edit", clicked: state.curry("popup/open", { type: "edit", event: item }) },
{ type: "item", text: "foo", clicked: state.curry("popup/open", { type: "text", text: "hello!" }) },
{ type: "item", text: "bar", clicked: state.curry("popup/open", { type: "text", text: "hello!" }) },
{ type: "item", text: "baz", clicked: state.curry("popup/open", { type: "text", text: "hello!" }) },
{ type: "hr" },
{ type: "submenu", text: "baz", clicked: () => events.emit("popup", { type: "text", text: "hello!" }), items: [
{ type: "item", text: "two", clicked: () => events.emit("popup", { type: "text", text: "hello!" }) },
{ type: "item", text: "more", clicked: () => events.emit("popup", { type: "text", text: "hello!" }) },
{ type: "submenu", text: "baz", clicked: state.curry("popup/open", { type: "text", text: "hello!" }), items: [
{ type: "item", text: "two", clicked: state.curry("popup/open", { type: "text", text: "hello!" }) },
{ type: "item", text: "more", clicked: state.curry("popup/open", { type: "text", text: "hello!" }) },
] },
];
events.emit("menu", { x: e.clientX, y: e.clientY, items });
state.do("menu/open", { x: e.clientX, y: e.clientY, items });
};
</script>
<nav id="nav">
@ -55,7 +55,7 @@
<svelte:component this={getIcon(item.type)} />
<span class="name">{item.getContent().name || "unnamed"}</span>
{#if !["home", "settings"].includes(item.id)}
<button on:click={() => events.emit("popup", { type: "edit", event: item })}>+</button>
<button on:click={state.curry("popup/open", { type: "edit", event: item })}>+</button>
{/if}
</div>
</a>

View file

@ -94,7 +94,7 @@
function popout() {
audio.pause();
events.emit("open", "statusMusic", { event, time: currentTime });
state.do("statusMusic/open", { event, time: currentTime });
}
function hijackMedia() {

View file

@ -21,7 +21,7 @@
function close() {
dispatch("close");
events.emit("close", "popup");
events.do("popup/close");
}
</script>
<div class="background" on:click={close} transition:opacity|global>

View file

@ -5,10 +5,10 @@
export let menu: TopLevelMenu;
$: ({ x, y, items } = menu);
</script>
<div class="wrapper" style:left="{x}px" style:top="{y}px" on:click={() => events.emit("menu", null)}>
<div class="wrapper" style:left="{x}px" style:top="{y}px" on:click={events.curry("menu/close")}>
<Menu {items} />
</div>
<svelte:body on:click={() => events.emit("menu", null)} />
<svelte:body on:click={events.curry("menu/close")} />
<style lang="scss">
.wrapper {
position: fixed;

View file

@ -78,7 +78,7 @@ class Query extends EventEmitter implements TypedEmitter<QueryEvents> {
throw err;
});
if (!req) break;
if (req.status !== 200) throw new Error("failed to query: " + await req.text());
if (!req.ok) throw new Error("failed to query: " + await req.text());
const json = await req.json();
console.log(json);
for (let relId in json.relations) relations.set(relId, Event.from(json.relations[relId]));
@ -114,7 +114,7 @@ export class Client {
},
body: blob,
});
if (req.status !== (wait ? 201 : 202)) {
if (!req.ok) {
throw new Error("failed to upload: " + await req.text());
}
const { ref } = await req.json();
@ -137,7 +137,7 @@ export class Client {
},
body: JSON.stringify(event),
});
if (req.status !== (wait ? 201 : 202)) {
if (!req.ok) {
throw new Error("failed to upload: " + await req.text());
}
const { ref } = await req.json();
@ -154,7 +154,7 @@ export class Client {
});
const type = req.headers.get("content-type");
if (req.status === 404) throw new Error("doesnt exist");
if (req.status !== 200) throw new Error("failed to fetch");
if (!req.ok) throw new Error("failed to fetch");
if (type === "application/octet-stream") return req.arrayBuffer();
if (type === "application/json") return Event.from(await req.json());
throw new Error("invalid response type");
@ -205,7 +205,7 @@ export const api = Object.assign(getClient(), {
},
body: isRaw ? blob : JSON.stringify(blob),
});
if (req.status !== (wait ? 201 : 202)) {
if (!req.ok) {
console.log(await req.text());
throw new Error("failed to upload: " + req.statusText);
}
@ -260,7 +260,7 @@ export const api = Object.assign(getClient(), {
throw err;
});
if (!req) break;
if (req.status !== 200) throw new Error("failed to query: " + await req.text());
if (!req.ok) throw new Error("failed to query: " + await req.text());
const json = await req.json();
console.log(json);
for (let relId in json.relations) relations.set(relId, json.relations[relId]);
@ -295,7 +295,4 @@ export const api = Object.assign(getClient(), {
}
});
globalThis.client = client;
globalThis.api = api;
globalThis.base64 = base64;
globalThis.ed25519 = ed25519;
Object.assign(globalThis, { client, api, base64, ed25519 });

View file

@ -36,14 +36,14 @@ export class Option<T> {
}
export class Result<T> {
private value: T | Error;
constructor(private value: T | Error) {}
static handle<T>(fn: () => T): Result<T> {
return Object.assign(new Result(), { value: fn() });
return Object.assign(new Result(fn()));
}
static async handleAsync<T>(fn: () => T): Promise<Result<T>> {
return Object.assign(new Result(), { value: await fn() });
return Object.assign(new Result(await fn()));
}
unwrap(expect?: string): T {

38
web/lib/reduxer.ts Normal file
View file

@ -0,0 +1,38 @@
import { Subscriber, Unsubscriber } from "svelte/store";
type Mapper<S> = (state: S, ...args: any) => Partial<S>;
type Args<T extends (...args: any) => any> = Parameters<T> extends [any, ...infer U] ? U : never;
export class Reduxer<S, R extends Record<string, Mapper<S>>> {
private subscribers = new Set<(state: S) => void>();
constructor(
private state: S,
private reducers: R,
) {}
subscribe(subscription: Subscriber<S>): Unsubscriber {
subscription(this.state);
this.subscribers.add(subscription);
return () => this.subscribers.delete(subscription);
}
get() {
return this.state;
}
do<N extends keyof R>(action: N, ...args: Args<R[N]>) {
this.state = {
...this.state,
...this.reducers[action](this.state, ...args),
};
for (let sub of this.subscribers) {
sub(this.state);
}
return this.state;
}
curry<N extends keyof R>(action: N, ...args: Args<R[N]>) {
return () => this.do(action, ...args);
}
}

View file

@ -1,19 +1,2 @@
import { api, ed25519 } from "./lib/api";
import App from "./App.svelte";
let key = localStorage.getItem("key");
if (key) {
key = ed25519.etc.hexToBytes(key);
} else {
key = ed25519.utils.randomPrivateKey();
localStorage.setItem("key", ed25519.etc.bytesToHex(key));
}
api.key = key;
let server = localStorage.getItem("server");
api.baseUrl = server || "http://localhost:3210/";
let token = localStorage.getItem("token");
api.token = token || "";
new App({ target: document.body });

View file

@ -1,6 +1,6 @@
<script lang="ts">
import { api } from "../lib/api";
import { events } from "../events";
import { state } from "../state";
let event_type: string;
let name: string;
let description: string;
@ -8,7 +8,7 @@
async function create() {
const ev = await api.makeEvent(event_type, { name, description });
const ref = await api.upload(ev);
events.emit("close", "popup");
state.do("popup/close");
location.hash = `/${ref}`;
console.log(ref);
}

View file

@ -1,7 +1,9 @@
<script lang="ts">
import { api } from "../lib/api";
import { asyncIterArray } from "../lib/util";
export let event;
import { state } from "../state";
import type { Event } from "../lib/api";
export let event: Event;
let selected = "general";
let name = event.content.name;
let description = event.content.description;
@ -12,9 +14,9 @@
return query.filter(i => i.type === "relation" && i.event.type === "x.acl").map(i => i.event)[0] ?? null;
}
function confirm(question) {
function confirm(question: string) {
return new Promise(res => {
events.emit("popup", {
state.do("popup/open", {
type: "confirm",
question,
resolve: res,
@ -23,17 +25,17 @@
}
async function update() {
if (name === event.content.name && description === event.content.description) return events.emit("close", "popup");
if (name === event.content.name && description === event.content.description) return events.do("close", "popup");
const ev = await api.makeEvent("x.update", { name, description }, { [event.id]: { type: "update" } });
await api.upload(ev);
events.emit("close", "popup");
state.do("popup/close");
}
async function deleteNexus() {
if (!await confirm("are you sure you want to delete this?")) return;
const ev = await api.makeEvent("x.redact", {}, { [event.id]: { type: "redact" } });
await api.upload(ev);
events.emit("close", "popup");
state.do("popup/close");
location.hash = "/home";
}

View file

@ -13,7 +13,7 @@
function open(event: Event) {
const media = [...embeds.values()].filter(i => ["image", "video"].includes(i.derived?.file?.mime?.split("/")[0]));
events.emit("popup", { type: "event", event, paginate: readable(media) });
events.do("popup/open", { type: "event", event, paginate: readable(media) });
}
</script>
<div class="document" class:hideNotes>

View file

@ -47,7 +47,7 @@
const event = await api.makeEvent("l.doc", content, relations);
await api.upload(event, false);
} catch (err) {
events.emit("popup", { type: "text", text: "failed to upload: " + err });
events.do("popup/open", { type: "text", text: "failed to upload: " + err });
}
uploadStatus.count++;
}

View file

@ -103,5 +103,4 @@ export function parse(tokens: Array<Token>, strict = true): Array<DocInline> {
}
}
globalThis.tokenize = tokenize;
globalThis.parse = parse;
Object.assign(globalThis, { tokenize, parse });

View file

@ -22,9 +22,9 @@
function handleOpen(event: Event) {
if (event.derived?.file?.mime?.startsWith("audio/")) {
events.emit("open", "statusMusic", { event, paginate: readable(items) });
events.do("statusMusic/open", { event, paginate: readable(items) });
} else {
events.emit("popup", { type: "event", event, paginate: readable(items) });
events.do("popup/open", { type: "event", event, paginate: readable(items) });
}
}
</script>

View file

@ -22,9 +22,9 @@
function handleOpen(event: Event) {
if (event.derived?.file?.mime?.startsWith("audio/")) {
events.emit("open", "statusMusic", { event, paginate: readable(items) });
events.do("statusMusic/open", { event, paginate: readable(items) });
} else {
events.emit("popup", { type: "event", event, paginate: readable(items) });
events.do("popup/open", { type: "event", event, paginate: readable(items) });
}
}
</script>

View file

@ -35,7 +35,7 @@
const idx = $paginate.findIndex(i => i.id === event.id);
const newev = $paginate[idx + 1];
if (newev) {
events.emit("popupReplace", { type: "event", event: newev, paginate });
events.do("popup/replace", { type: "event", event: newev, paginate });
}
}
@ -44,7 +44,7 @@
const idx = $paginate?.findIndex(i => i.id === event.id);
const newev = $paginate[idx - 1];
if (newev) {
events.emit("popupReplace", { type: "event", event: newev, paginate });
events.do("popup/replace", { type: "event", event: newev, paginate });
}
}
</script>

View file

@ -6,7 +6,7 @@
const { selected } = context;
</script>
<div
on:click={() => events.emit("selectComment", ($selected === root.event.id) ? null : root.event)}
on:click={() => events.do("selectComment", ($selected === root.event.id) ? null : root.event)}
class:selected={$selected === root.event.id}
>
{root.event.content.body}

View file

@ -106,7 +106,7 @@
<table>
<em>new comment</em>
{#if $selected}
<tr><td>reply:</td><td><button on:click={() => events.emit("selectComment", null)}>deselect</button></td></tr>
<tr><td>reply:</td><td><button on:click={events.curry("selectComment", null)}>deselect</button></td></tr>
{/if}
<tr><td>comment:</td><td><textarea bind:value={commentBody}></textarea></td></tr>
<tr><td></td><td><input type="submit" value="post"></td></tr>

View file

@ -1,5 +1,5 @@
<script lang="ts">
import { events } from "../events";
import { state } from "../state";
import { Document, parse, tokenize } from "../scenes/Document";
let docText = "";
let doc = [];
@ -16,7 +16,7 @@
If you have a private key and token, you can set it in <a href="#/settings">settings</a>.
</p>
<hr />
<button on:click={() => events.emit("popup", { type: "create" })}>create bucket</button><br />
<button on:click={state.curry("popup/open", { type: "create" })}>create bucket</button><br />
<textarea bind:value={docText}></textarea><br />
<Document {doc} />
</div>

View file

@ -1,18 +1,7 @@
import EventEmitter from "events";
import TypedEmitter from "typed-emitter";
import { readable } from "svelte/store";
import { Event } from "./lib/api";
import type { Readable } from "svelte/store";
import type { TopLevelMenu } from "./atoms/context/types";
type Paginate = Readable<Array<Event>>;
export type Popup =
{ type: "event", event: Event, paginate?: Paginate } |
{ type: "edit", event: Event } |
{ type: "create" } |
{ type: "confirm", question: string, resolve: (accepted: boolean) => void } |
{ type: "text", text: string };
import { Paginate, Popup } from "./types";
import { Reduxer } from "./lib/reduxer";
interface State {
sidebar: boolean,
@ -22,87 +11,36 @@ interface State {
menu: TopLevelMenu | null,
}
type Events = {
"*": (...args: Array<any>) => void,
close: (thing: "statusMusic" | "sidebar" | "pins" | "popup") => void,
open: ((thing: "sidebar" | "pins" | "popup") => void)
| ((thing: "statusMusic", ctx: { event: Event, time?: number, paginate?: Paginate }) => void),
popup: (popup: Popup) => void,
popupReplace: (popup: Popup) => void,
menu: (menu: TopLevelMenu | null) => void,
};
interface EventHandlers {
[event: string]: (state: State, ...any: Array<any>) => Partial<State>,
};
const handlers: EventHandlers = {
open(state, item, ctx) {
switch (item) {
case "pins": return { pins: true };
case "sidebar": return { sidebar: true };
case "statusMusic": return { statusMusic: ctx };
}
if (!(item instanceof Event)) {
throw new Error("not an event");
}
if (item.derived?.file?.mime?.startsWith("audio/")) {
return { statusMusic: item };
} else {
return { popups: [...state.popups, { type: "event", event: item }] };
}
},
popup(state, popup) {
return { popups: [...state.popups, popup] };
},
popupReplace(state, popup) {
return { popups: [...state.popups.slice(0, -1), popup] };
},
menu(state, menu) {
return { menu };
},
close(state, item) {
switch (item) {
case "pins": return { pins: false };
case "sidebar": return { sidebar: false };
case "statusMusic": return { statusMusic: null };
case "popup": return { popups: state.popups.slice(0, -1) };
default: throw new Error("unknown");
}
},
};
function update(_, update) {
function handle(type, ...args) {
update((state: State) => ({ ...state, ...handlers[type]?.(state, ...args) }));
}
events.on("*", handle);
return function stop() {
events.off("*", handle);
}
}
const initial: State = {
const init: State = {
sidebar: window.innerWidth > 1080,
pins: true,
statusMusic: null,
popups: [],
menu: null,
};
class Emitter extends EventEmitter {
emit(type, ...args) {
super.emit("*", type, ...args);
super.emit(type, ...args);
}
};
// names take the form object/action[/type]
export const state = new Reduxer(init, {
"event/open": (state, event: Event) => {
if (event.derived?.file?.mime?.startsWith("audio/")) {
return { statusMusic: { event } };
} else {
const popup: Popup = { type: "event", event };
return { popups: [...state.popups, popup] };
}
},
"pins/close": () => ({ pins: false }),
"pins/open": () => ({ pins: true }),
"sidebar/close": () => ({ sidebar: false }),
"sidebar/open": () => ({ sidebar: true }),
"statusMusic/close": () => ({ statusMusic: null }),
"statusMusic/open": (_, ctx: { event: Event, time?: number, paginate?: Paginate }) => ({ statusMusic: ctx }),
"popup/open": (state, popup: Popup) => ({ popups: [...state.popups, popup] }),
"popup/replace": (state, popup: Popup) => ({ popups: [...state.popups.slice(0, -1), popup] }),
"popup/close": (state) => ({ popups: [...state.popups.slice(0, -1)] }),
"menu/open": (_, menu: TopLevelMenu) => ({ menu }),
"menu/close": (_) => ({ menu: null }),
});
export const events = new Emitter() as TypedEmitter<Events>;
export const state = readable(initial, update);
globalThis.events = events;
export const events = state;
Object.assign(globalThis, { events });

View file

@ -2,7 +2,7 @@
// TODO: merge/deduplicate with audio atom?
import brokenImgUrl from "../assets/broken.png";
import { api } from "../lib/api";
import { events } from "../events";
import { state } from "../state";
import Clock from "../Clock.svelte";
import "carbon-icons-svelte"; // import everything so vite optimizes correctly
import PlayIc from "carbon-icons-svelte/lib/PlayFilledAlt.svelte";
@ -12,7 +12,7 @@
import VolumeUpIc from "carbon-icons-svelte/lib/VolumeUpFilled.svelte";
import VolumeMuteIc from "carbon-icons-svelte/lib/VolumeMuteFilled.svelte";
import StopIc from "carbon-icons-svelte/lib/StopFilledAlt.svelte";
import { onDestroy, onMount } from "svelte";
import { afterUpdate, onDestroy, onMount } from "svelte";
import { formatTime } from "../lib/util";
import type { Event } from "../lib/api";
export let event: Event;
@ -20,7 +20,7 @@
export let small: boolean;
$: url = api.getBlobUrl(event.id);
$: media = event.derived?.media ?? {};
let audio;
let audio: HTMLAudioElement;
let duration: number;
let currentTime: number;
let volume: number = 1;
@ -65,7 +65,7 @@
const frac = e.layerX / progressEl.offsetWidth;
previewTime = frac * duration;
if (e.type === "mousedown" || e.buttons === 1) {
audio.currentTime = previewTime;
time = previewTime;
}
}
@ -75,9 +75,9 @@
function scrubWheel(e: WheelEvent) {
if (e.deltaY > 0) {
audio.currentTime = Math.max(currentTime - 5, 0);
time = Math.max(currentTime - 5, 0);
} else {
audio.currentTime = Math.min(currentTime + 5, duration);
time = Math.min(currentTime + 5, duration);
}
}
@ -105,18 +105,10 @@
],
});
function handleUpdate(thing, ctx) {
if (thing !== "statusMusic") return;
if (ctx.time) queueMicrotask(() => currentTime = ctx.time);
}
events.on("open", handleUpdate);
onMount(() => audio.currentTime = time);
$: if (audio) audio.currentTime = time;
onDestroy(() => {
if ("mediaSession" in navigator) navigator.mediaSession.metadata = null;
events.off("open", handleUpdate);
});
</script>
<div class="wrapper" class:small>
@ -167,7 +159,7 @@
<VolumeUpIc />
{/if}
</button>
<button on:click={() => events.emit("close", "statusMusic")}>
<button on:click={state.curry("statusMusic/close")}>
<StopIc />
</button>
</div>
@ -185,7 +177,7 @@
on:playing={handleLoading}
on:stalled={handleLoading}
on:loadstart={handleLoading}
on:ended={() => events.emit("close", "statusMusic")}
on:ended={state.curry("statusMusic/close")}
on:play={hijackMedia}
/>
</div>

View file

@ -1,6 +1,5 @@
<script lang="ts">
import { api, ed25519, base64 } from "../lib/api";
import { events } from "../events";
import { state } from "../state";
$: console.log($state);
</script>
@ -10,8 +9,8 @@
<div class="userid">user id: <code>{"%" + base64.encode(pubkey)}</code></div>
{/await}
<div>
<button on:click={() => events.emit($state.sidebar ? "close" : "open", "sidebar")}>sidebar</button>
<button on:click={() => events.emit($state.pins ? "close" : "open", "pins")}>pins</button>
<button on:click={state.curry($state.sidebar ? "sidebar/close" : "sidebar/open")}>sidebar</button>
<button on:click={state.curry($state.pins ? "pins/close" : "pins/open")}>pins</button>
</div>
</div>
<style lang="scss">

View file

@ -13,5 +13,5 @@
"noEmit": true,
"isolatedModules": true
},
"include": ["src/**/*.d.ts", "src/**/*.ts", "src/**/*.js", "src/**/*.tsx", "src/**/*.svelte"]
"include": ["**/*.d.ts", "**/*.ts", "**/*.js", "**/*.tsx", "**/*.svelte"]
}

12
web/types.ts Normal file
View file

@ -0,0 +1,12 @@
import { Event } from "./lib/api";
import { Readable } from "svelte/store";
export type Paginate = Readable<Array<Event>>;
export type Popup =
{ type: "event", event: Event, paginate?: Paginate } |
{ type: "edit", event: Event } |
{ type: "create" } |
{ type: "confirm", question: string, resolve: (accepted: boolean) => void } |
{ type: "text", text: string };