maintenance (see long commit message)

- update packages
- add nix flake
- add prepared sqlx queries
- fix various compilation errors

(this project is still pretty much dead though)
tezlm 2024-08-01 02:06:04 -07:00
parent e221dd8b9b
commit a3ca248c59
Signed by: tezlm
GPG key ID: 649733FCD94AFBBA
37 changed files with 2969 additions and 1762 deletions

Cargo.lock (generated, 2108 changed lines)

File diff suppressed because it is too large

View file

@ -119,9 +119,10 @@ impl fuser::Filesystem for Filesystem {
senders: Some(HashSet::from([self.key.get_id()])),
types: Some(HashSet::from(["x.file".into(), "x.tag.local".into()])),
tags: None,
relations: HashSet::from([QueryRelation::from_rel("redact".into(), "x.redact".into())]),
relations: HashSet::from([QueryRelation::from_rel("redact", "x.redact")]),
ephemeral: HashSet::new(),
with_redacts: false,
fetch: HashSet::new(),
};
let mut inodes = self.inodes.blocking_lock();

View file

@ -1,5 +1,3 @@
#![feature(fs_try_exists)]
// TODO: proper error handling
mod cli;
@ -64,6 +62,14 @@ impl Display for DynError {
impl std::error::Error for StaticError {}
impl std::error::Error for DynError {}
fn try_exists(path: &std::path::Path) -> std::io::Result<bool> {
match std::fs::metadata(path) {
Ok(_) => Ok(true),
Err(error) if error.kind() == std::io::ErrorKind::NotFound => Ok(false),
Err(error) => Err(error),
}
}
fn get_or_init_config(
file: Option<&std::path::Path>,
profile: Option<&str>,
@ -73,7 +79,7 @@ fn get_or_init_config(
.expect("should have config dir")
.join("ufh/config.json")
});
let mut config: Config = if std::fs::try_exists(&path)? {
let mut config: Config = if try_exists(&path)? {
serde_json::from_slice(&std::fs::read(&path)?)?
} else {
let (_, key) = ActorId::new();
@ -470,6 +476,7 @@ async fn main() -> Result<(), Error> {
relations: relations.unwrap_or_default(),
ephemeral: HashSet::new(),
with_redacts: false,
fetch: HashSet::new(),
};
let query = client.query(&query).await?;
let result = client.list(&query, Some(1), None, None).await?;
@ -503,6 +510,7 @@ async fn main() -> Result<(), Error> {
relations: HashSet::new(),
ephemeral: HashSet::new(),
with_redacts: false,
fetch: HashSet::new(),
};
let query = client.query(&query).await?;
let timeout = if stream { Some(30000) } else { None };
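
The hunks above replace the unstable fs_try_exists feature with a hand-rolled helper: NotFound maps to Ok(false), any other I/O error is passed through unchanged. On stable Rust, Path::try_exists (stable since 1.63) gives equivalent behaviour; a minimal sketch of that alternative, not the repo's code:

use std::path::Path;

fn config_exists(path: &Path) -> std::io::Result<bool> {
    // Ok(false) only when the path does not exist; permission errors and the
    // like still surface as Err, matching the metadata-based helper above.
    path.try_exists()
}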

flake.lock (new file, 129 lines)
View file

@ -0,0 +1,129 @@
{
"nodes": {
"naersk": {
"inputs": {
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1721727458,
"narHash": "sha256-r/xppY958gmZ4oTfLiHN0ZGuQ+RSTijDblVgVLFi1mw=",
"owner": "nix-community",
"repo": "naersk",
"rev": "3fb418eaf352498f6b6c30592e3beb63df42ef11",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "master",
"repo": "naersk",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1722185531,
"narHash": "sha256-veKR07psFoJjINLC8RK4DiLniGGMgF3QMlS4tb74S6k=",
"path": "/nix/store/bcghcr9qwqmanpds017w75mcqda4fgab-source",
"rev": "52ec9ac3b12395ad677e8b62106f0b98c1f8569d",
"type": "path"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1722415718,
"narHash": "sha256-5US0/pgxbMksF92k1+eOa8arJTJiPvsdZj9Dl+vJkM4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "c3392ad349a5227f4a3464dce87bcc5046692fce",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_3": {
"locked": {
"lastModified": 1718428119,
"narHash": "sha256-WdWDpNaq6u1IPtxtYHHWpl5BmabtpmLnMAx0RdJ/vo8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "e6cea36f83499eb4e9cd184c8a8e823296b50ad5",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"naersk": "naersk",
"nixpkgs": "nixpkgs_2",
"rust-overlay": "rust-overlay",
"utils": "utils"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1722479030,
"narHash": "sha256-98tsdV+N9wSVU0vlzjJ30+9QL2bescJs5jWFurTpvAw=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "c02e7d32607e4e16c80152a40ee141c4877b00cb",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 37 lines)
View file

@ -0,0 +1,37 @@
{
inputs = {
naersk.url = "github:nix-community/naersk/master";
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
utils.url = "github:numtide/flake-utils";
rust-overlay.url = "github:oxalica/rust-overlay";
};
outputs = { self, nixpkgs, utils, naersk, rust-overlay }:
utils.lib.eachDefaultSystem (system:
let
pkgs = (import nixpkgs) {
inherit system;
overlays = [ (import rust-overlay) ];
};
naersk-lib = pkgs.callPackage naersk { };
nativeBuildInputs = with pkgs; [
pkg-config fuse3
(rust-bin.selectLatestNightlyWith
(toolchain: toolchain.default.override {
extensions = [ "rust-src" "clippy" ];
}))
];
in
{
defaultPackage = naersk-lib.buildPackage {
src = ./.;
inherit nativeBuildInputs;
};
devShell = with pkgs; mkShell {
RUST_SRC_PATH = rustPlatform.rustLibSrc;
buildInputs = [ cargo rustc rustfmt pre-commit rustPackages.clippy sqlx-cli ];
inherit nativeBuildInputs;
};
});
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR REPLACE INTO derived (ref, json) VALUES (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "052dee8887519d738b6757cad786e5aab146ac944ea045f88882e65279bcc217"
}
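
This and the next several hunks add sqlx's prepared-query cache: sqlx 0.7 writes one query-<hash>.json file per statement into a .sqlx/ directory, replacing the single sqlx-data.json that 0.6 used (the old "offline" cargo feature is gone as well; offline mode is now always available). With SQLX_OFFLINE=1 the query! macros are checked against these files instead of a live database. A minimal sketch of the kind of call site the file above backs, assuming a SqlitePool handle; the function name and bindings are illustrative, not taken from the repo:

use sqlx::SqlitePool;

async fn put_derived(pool: &SqlitePool, item_ref: &str, json: &str) -> Result<(), sqlx::Error> {
    // With SQLX_OFFLINE=1 this statement is checked at compile time against the
    // cached .sqlx/query-<hash>.json rather than a live database.
    sqlx::query!(
        "INSERT OR REPLACE INTO derived (ref, json) VALUES (?, ?)",
        item_ref,
        json
    )
    .execute(pool)
    .await?;
    Ok(())
}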

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO events (ref, sender, type, json, flags) VALUES (?, ?, ?, ?, ?) RETURNING rowid",
"describe": {
"columns": [
{
"name": "rowid",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 5
},
"nullable": [
false
]
},
"hash": "0c8141b5ae35d99e1cf8b3d08df2a2efdae2e2dcf1981dac8cf7ad0f199fc0b2"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO blobs (ref, size, gc_at) VALUES (?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "169b36260e7ac883c091a9c4778dd7be442ffd45921d090a795652aca0210782"
}

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT json FROM events WHERE ref = ?",
"describe": {
"columns": [
{
"name": "json",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "1744a001634d18e149b38aa393664c0978ccd3b96b2e096c8df5f1052fc923a6"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO thumbnails (ref, size, blob) VALUES (?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "3912530c19a69c5d9a68e495c735d285084f291d1d14d459dd1968951f9d9743"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO refs (ref, flags) VALUES (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "441af239734451b0316dd778a3bb87e04994fa13949afee89d9f71a230463428"
}

View file

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM derived WHERE ref = ?",
"describe": {
"columns": [
{
"name": "ref",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "json",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "5164687df2910e26a5580228e2971065ab2714de1b2ffbcb40f7a3aa9bfa3176"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE events SET json = ?, flags = flags | 1 WHERE ref = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "559ad911cbb23392c1c0b1a35b72496150b916c866a08e58150ef28eed50d74b"
}

View file

@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM thumbnails WHERE ref = ? AND size IN (?, 'raw')",
"describe": {
"columns": [
{
"name": "ref",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "size",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "blob",
"ordinal": 2,
"type_info": "Blob"
}
],
"parameters": {
"Right": 2
},
"nullable": [
true,
false,
false
]
},
"hash": "659aba270b3a195cba95326736b171ea9c86d0ccf4aefbe819e9cdbb28555819"
}

View file

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "\n SELECT events.json as json, derived.json as derived FROM events\n LEFT JOIN derived ON derived.ref = events.ref\n WHERE events.ref = ?\n ",
"describe": {
"columns": [
{
"name": "json",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "derived",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
true
]
},
"hash": "7051545f475ac5a81d04e3ada9e49cb752e2cf352feab7ca520f080fb98f9176"
}

View file

@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT * FROM subscriptions WHERE ref = ? AND direction = 'o'",
"describe": {
"columns": [
{
"name": "ref",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "user",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "node",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "direction",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "7eb3ddfc275dee21feb4950119108cdeb607424265929484b9475826051e3397"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO subscriptions (ref, user, node, direction) VALUES (?, ?, ?, 'o')",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "7f715887d044753437f62e740157a0a205dc79aff90deb7771966895b4037391"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM derived WHERE ref = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "a4fa9a48324c469921d78ffbde825513371da6a9d1bbf6bb45e424a5ed6f2b8e"
}

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT user FROM subscriptions WHERE ref = ? AND direction = 'i'",
"describe": {
"columns": [
{
"name": "user",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "a79eb36885629e35fadad13ddd87d4102a1518242d56b22964fab7081c9420b0"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM thumbnails WHERE ref = ? AND size = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "c849102b9b26dee51977bf13f83094ecc82fd37b823c6cf94285e266ac0f1710"
}

View file

@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT user, level as \"level: u8\" FROM sessions WHERE id = ?",
"describe": {
"columns": [
{
"name": "user",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "level: u8",
"ordinal": 1,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false
]
},
"hash": "d0a88e650f1ecf241231e05dc1fad5a7ea738f13888bbf1c53d13883f31f46a3"
}

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT rowid FROM events WHERE ref = ?",
"describe": {
"columns": [
{
"name": "rowid",
"ordinal": 0,
"type_info": "Int64"
}
],
"parameters": {
"Right": 1
},
"nullable": [
true
]
},
"hash": "f42d567413f5a357d9f99e52dc71b78686627486228752b1bf28a2eaa95d42f7"
}

View file

@ -30,8 +30,8 @@ serde = { version = "1.0.171", features = ["derive"] }
serde-aux = "4.2.0"
serde_json = "1.0.97"
sha2 = { version = "0.10.7", features = ["asm"] }
sqlx = { version = "0.6.3", features = ["sqlite", "runtime-tokio-rustls"] }
tantivy = "0.20.2"
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls"] }
tantivy = "0.22.0"
thiserror = "1.0.40"
tokio = { version = "1.28.2", features = ["rt-multi-thread", "macros", "process"] }
tower-http = { version = "0.4.1", features = ["cors", "trace"] }

View file

@ -5,7 +5,7 @@ use tokio::process::Command;
// FIXME: some files (mp4, mov) may fail to thumbnail with stdin
// they can have a MOOV atom at the end, and ffmpeg can't seek to the beginning
pub async fn extract(buffer: &[u8], args: &[&str]) -> Result<Vec<u8>, ()> {
let mut cmd = Command::new("/usr/bin/ffmpeg")
let mut cmd = Command::new("ffmpeg")
.args([&["-v", "quiet", "-i", "-"], args].concat())
.stdin(Stdio::piped())
.stdout(Stdio::piped())

View file

@ -57,7 +57,7 @@ pub struct Tags(
impl Ffprobe {
pub async fn derive(buffer: &[u8]) -> Option<Ffprobe> {
let mut cmd = Command::new("/usr/bin/ffprobe")
let mut cmd = Command::new("ffprobe")
.args([
"-v",
"quiet",

View file

@ -1,11 +1,9 @@
#![feature(async_fn_in_trait)] // ahh yes, experimental features
#![allow(clippy::type_complexity)]
// general purpose lints
#![warn(
clippy::get_unwrap,
clippy::unnested_or_patterns,
clippy::undocumented_unsafe_blocks,
illegal_floating_point_literal_pattern
)]
// "production quality" lints

View file

@ -113,7 +113,7 @@ async fn message(
"SELECT user FROM subscriptions WHERE ref = ? AND direction = 'i'",
item_ref_str,
)
.fetch_all(&mut tx)
.fetch_all(&mut *tx)
.await?;
let existing_users: HashSet<ActorId> = records
.into_iter()
@ -139,7 +139,7 @@ async fn message(
user_str,
contact_str,
)
.execute(&mut tx)
.execute(&mut *tx)
.await;
if let Err(err) = query {
error!("db error: {}", err);

View file

@ -99,20 +99,20 @@ impl Database for Sqlite {
item_ref_str,
0
)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
trace!("insert into events");
let insert = sql!("INSERT OR IGNORE INTO events (ref, sender, type, json, flags) VALUES (?, ?, ?, ?, ?) RETURNING rowid", item_ref_str, item_sender_str, item_type_str, event_str, 0)
.fetch_optional(&mut tx)
.fetch_optional(&mut *tx)
.await?;
let Some(insert) = insert else {
trace!("event already exists!");
let row = sql!("SELECT rowid FROM events WHERE ref = ?", item_ref_str)
.fetch_one(&mut tx)
.fetch_one(&mut *tx)
.await?;
return Ok(row.rowid.unwrap() as u32);
};
trace!("inserted with rowid = {}", insert.rowid.unwrap());
trace!("inserted with rowid = {}", insert.rowid);
if !event.relations.is_empty() && event.content.get_type() != "x.redact" {
trace!("insert into relations");
@ -123,14 +123,14 @@ impl Database for Sqlite {
.push_bind(target.to_string())
.push_bind(info.rel_type.clone());
});
sql_relations.build().execute(&mut tx).await?;
sql_relations.build().execute(&mut *tx).await?;
}
tx.commit().await?;
debug!("commit event {}", item_ref_str);
Ok(insert.rowid.unwrap() as u32)
Ok(insert.rowid as u32)
}
#[tracing::instrument(skip_all)]
@ -152,7 +152,7 @@ impl Database for Sqlite {
item_ref_str,
0
)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
sqlx::query!(
"INSERT OR IGNORE INTO blobs (ref, size, gc_at) VALUES (?, ?, ?)",
@ -160,7 +160,7 @@ impl Database for Sqlite {
size,
gc_at
)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
tx.commit().await?;
debug!("created blob");
@ -174,7 +174,7 @@ impl Database for Sqlite {
let item_ref_str = item_ref.to_string();
let mut tx = self.pool.begin().await?;
let row = sql!("SELECT json FROM events WHERE ref = ?", item_ref_str)
.fetch_one(&mut tx)
.fetch_one(&mut *tx)
.await?;
debug!("found event and will patch it");
let mut event: Event = serde_json::from_str(&row.json)?;
@ -185,10 +185,10 @@ impl Database for Sqlite {
event_str,
item_ref_str
)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
sql!("DELETE FROM derived WHERE ref = ?", item_ref_str)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
tx.commit().await?;
debug!("event has been redacted");
@ -566,13 +566,13 @@ impl Database for Sqlite {
for item_ref in item_refs {
sep.push_bind(item_ref.to_string());
}
delete_query.push(")").build().execute(&mut tx).await?;
delete_query.push(")").build().execute(&mut *tx).await?;
let mut insert_query = QueryBuilder::new("INSERT INTO tags (ref, tag) ");
insert_query.push_values(product, |mut item, (item_ref, tag)| {
item.push_bind(item_ref.to_string()).push_bind(tag);
});
insert_query.build().execute(&mut tx).await?;
insert_query.build().execute(&mut *tx).await?;
tx.commit().await?;
debug!("tagged {} events with {} tags", item_refs.len(), tags.len());
@ -588,7 +588,7 @@ impl Database for Sqlite {
debug!("put {} into derived", key);
let row = sql!("SELECT * FROM derived WHERE ref = ?", item_ref_str)
.fetch_optional(&mut tx)
.fetch_optional(&mut *tx)
.await?;
let mut derived: Derived = row
@ -610,7 +610,7 @@ impl Database for Sqlite {
item_ref_str,
derived_str
)
.execute(&mut tx)
.execute(&mut *tx)
.await?;
tx.commit().await?;

View file

@ -5,8 +5,7 @@ use std::sync::Arc;
use super::{Document, Search, SearchResult};
use crate::{state::search::Snippet, Error};
use tantivy::{
collector::TopDocs, directory::MmapDirectory, query::QueryParser, schema::Schema,
time::OffsetDateTime, DateTime, Document as TantivyDoc, Index, IndexReader, IndexWriter,
collector::TopDocs, directory::MmapDirectory, query::QueryParser, schema::{Schema, Value as _}, time::OffsetDateTime, DateTime, Index, IndexReader, IndexWriter, TantivyDocument as TantivyDoc
};
use tokio::sync::Mutex;
use tracing::debug;
@ -156,7 +155,7 @@ impl Search for Tantivy {
let collector = TopDocs::with_limit(limit).and_offset(offset);
let mut docs = Vec::new();
for (score, addr) in searcher.search(&parsed, &collector)? {
let doc = searcher.doc(addr)?;
let doc: TantivyDoc = searcher.doc(addr)?;
let snippet = if sniplen > 0 {
let snippet = snippets.snippet_from_doc(&doc);
let snippet = (!snippet.is_empty()).then(|| Snippet {
@ -174,7 +173,7 @@ impl Search for Tantivy {
let item_ref: ItemRef = doc
.get_first(field("ref"))
.expect("document doesn't have an item ref!")
.as_text()
.as_str()
.unwrap()
.parse()
.unwrap();
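
The search hunks above track the tantivy bump from 0.20 to 0.22: Document is now a trait, the concrete type is TantivyDocument (hence the explicit type on searcher.doc), and stored field values are read through the Value trait, where as_str replaces the old as_text. A rough sketch of the new accessor, mirroring the call in the diff; the helper name and signature are illustrative:

use tantivy::schema::{Field, Value as _};
use tantivy::TantivyDocument;

fn first_ref(doc: &TantivyDocument, field: Field) -> Option<String> {
    let value = doc.get_first(field)?;
    // as_str comes from the Value trait imported above; in 0.20 this accessor
    // was called as_text.
    Some(value.as_str()?.to_owned())
}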

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT hash FROM blobs ORDER BY rowid LIMIT ?",
"describe": {
"columns": [
{
"name": "hash",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false
]
},
"hash": "215682ae0306508c5f376861b9b40667e0e20e39acf8fc680eb5957ed750695c"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM blobs WHERE hash = ?",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "8f9ebe65f9c57bbfdd5653a9259309f217f511764e59cb5e5484c89e3f118479"
}

View file

@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT OR IGNORE INTO blobs (hash) VALUES (?)",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "94a2817c0af392a2af5d03b216d1193c25d3bd0551b2d7d77bdcf2495d2c9fa1"
}

View file

@ -0,0 +1,20 @@
{
"db_name": "SQLite",
"query": "SELECT hash FROM blobs WHERE rowid > (SELECT rowid FROM blobs WHERE hash = ?) ORDER BY rowid LIMIT ?",
"describe": {
"columns": [
{
"name": "hash",
"ordinal": 0,
"type_info": "Text"
}
],
"parameters": {
"Right": 2
},
"nullable": [
false
]
},
"hash": "b83d26ec516401f5486814784a1277251402621cfda2700fba537af38ff8d35a"
}

View file

@ -12,7 +12,7 @@ futures-util = "0.3.28"
serde = { version = "1.0.171", features = ["derive"] }
serde_json = "1.0.96"
sha2 = { version = "0.10.6", features = ["asm"] }
sqlx = { version = "0.6.3", features = ["sqlite", "runtime-tokio-rustls", "offline"] }
sqlx = { version = "0.7.4", features = ["sqlite", "runtime-tokio-rustls"] }
thiserror = "1.0.40"
tokio = { version = "1.28.2", features = ["rt-multi-thread", "macros", "fs"] }
ufh = { path = "../lib" }

View file

@ -1 +0,0 @@
{"secs_since_epoch":1690528577,"nanos_since_epoch":119672817}

File diff suppressed because it is too large

View file

@ -9,4 +9,7 @@ export default defineConfig({
sendWarningsToBrowser: true,
}
})],
build: {
target: "esnext",
},
});