rewrite cli in rust, split out code to the library

parent a28ed815b8, commit 62c0b2fdf3
24 changed files with 949 additions and 524 deletions

317 Cargo.lock (generated)
@ -2,6 +2,15 @@
|
|||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
|
||||
dependencies = [
|
||||
"gimli",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
|
@ -36,6 +45,55 @@ version = "0.2.15"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56fc6cf8dc8c4158eed8649f9b8b0ea1518eb62b544fe9490d66fa0b349eafe9"
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
"anstyle-query",
|
||||
"anstyle-wincon",
|
||||
"colorchoice",
|
||||
"is-terminal",
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
|
||||
dependencies = [
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-query"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-wincon"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.68"
|
||||
|
@ -44,7 +102,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -122,7 +180,22 @@ dependencies = [
|
|||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "backtrace"
|
||||
version = "0.3.68"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
|
||||
dependencies = [
|
||||
"addr2line",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"miniz_oxide",
|
||||
"object",
|
||||
"rustc-demangle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -220,12 +293,90 @@ version = "1.0.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.3.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8cd2b2a819ad6eec39e8f1d6b53001af1e5469f8c177579cdaeb313115b825f"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
|
||||
|
||||
[[package]]
|
||||
name = "cli"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64 0.21.2",
|
||||
"bytes",
|
||||
"clap",
|
||||
"colored",
|
||||
"dirs",
|
||||
"ed25519-dalek",
|
||||
"humantime",
|
||||
"json-canon",
|
||||
"once_cell",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
"ufh",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color_quant"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||
|
||||
[[package]]
|
||||
name = "colored"
|
||||
version = "2.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6"
|
||||
dependencies = [
|
||||
"is-terminal",
|
||||
"lazy_static",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation"
|
||||
version = "0.9.3"
|
||||
|
@ -376,6 +527,27 @@ dependencies = [
|
|||
"crypto-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs"
|
||||
version = "5.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
|
||||
dependencies = [
|
||||
"dirs-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dirs-sys"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"option-ext",
|
||||
"redox_users",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dotenvy"
|
||||
version = "0.15.7"
|
||||
|
@ -580,7 +752,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -654,6 +826,12 @@ dependencies = [
|
|||
"weezl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.27.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
|
||||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
version = "0.3.19"
|
||||
|
@ -812,6 +990,12 @@ version = "1.0.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
|
||||
|
||||
[[package]]
|
||||
name = "humantime"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.14.26"
|
||||
|
@ -923,6 +1107,17 @@ version = "2.7.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f"
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"rustix 0.38.3",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.10.5"
|
||||
|
@ -1002,6 +1197,12 @@ version = "0.3.8"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.10"
|
||||
|
@ -1167,6 +1368,15 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.31.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.18.0"
|
||||
|
@ -1202,7 +1412,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1223,6 +1433,12 @@ dependencies = [
|
|||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "option-ext"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.11.2"
|
||||
|
@ -1277,7 +1493,7 @@ checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1319,9 +1535,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.60"
|
||||
version = "1.0.63"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
|
||||
checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
@ -1455,6 +1671,17 @@ dependencies = [
|
|||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
|
||||
dependencies = [
|
||||
"getrandom 0.2.10",
|
||||
"redox_syscall 0.2.16",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.11.18"
|
||||
|
@ -1492,6 +1719,12 @@ dependencies = [
|
|||
"winreg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-demangle"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.37.20"
|
||||
|
@ -1502,7 +1735,20 @@ dependencies = [
|
|||
"errno",
|
||||
"io-lifetimes",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"linux-raw-sys 0.3.8",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4"
|
||||
dependencies = [
|
||||
"bitflags 2.3.3",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.4.3",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
|
@ -1564,29 +1810,29 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.164"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
|
||||
checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.164"
|
||||
version = "1.0.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
|
||||
checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.97"
|
||||
version = "1.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a"
|
||||
checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
|
@ -1619,9 +1865,7 @@ name = "server"
|
|||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"base64 0.21.2",
|
||||
"bytes",
|
||||
"ed25519-dalek",
|
||||
"futures-util",
|
||||
"image",
|
||||
"infer",
|
||||
|
@ -1629,7 +1873,6 @@ dependencies = [
|
|||
"lru",
|
||||
"nanoid",
|
||||
"once_cell",
|
||||
"rand 0.7.3",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -1843,6 +2086,12 @@ dependencies = [
|
|||
"unicode-normalization",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.5.0"
|
||||
|
@ -1862,9 +2111,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.18"
|
||||
version = "2.0.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
|
||||
checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1887,7 +2136,7 @@ dependencies = [
|
|||
"cfg-if",
|
||||
"fastrand",
|
||||
"redox_syscall 0.3.5",
|
||||
"rustix",
|
||||
"rustix 0.37.20",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
|
@ -1908,7 +2157,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1939,11 +2188,12 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
|||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.28.2"
|
||||
version = "1.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2"
|
||||
checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"backtrace",
|
||||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
|
@ -1962,7 +2212,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2084,7 +2334,12 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
|
|||
name = "ufh"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64 0.21.2",
|
||||
"ed25519-dalek",
|
||||
"json-canon",
|
||||
"rand 0.7.3",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
|
@ -2132,6 +2387,12 @@ dependencies = [
|
|||
"percent-encoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "utf8parse"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.4.0"
|
||||
|
@ -2192,7 +2453,7 @@ dependencies = [
|
|||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
|
@ -2226,7 +2487,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
@ -2424,7 +2685,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.18",
|
||||
"syn 2.0.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
@@ -1,3 +1,3 @@
[workspace]
resolver = "2"
members = ["store-fs", "server", "lib"]
members = ["store-fs", "server", "lib", "cli"]

22 cli/Cargo.toml (new file)
@@ -0,0 +1,22 @@
[package]
name = "cli"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
base64 = "0.21.2"
bytes = "1.4.0"
clap = { version = "4.3.11", features = ["derive"] }
colored = "2.0.4"
dirs = "5.0.1"
ed25519-dalek = "1.0.1"
humantime = "2.1.0"
json-canon = "0.1.3"
once_cell = "1.18.0"
reqwest = { version = "0.11.18", features = ["json"] }
serde = { version = "1.0.166", features = ["derive"] }
serde_json = "1.0.100"
tokio = { version = "1.29.1", features = ["rt-multi-thread", "macros"] }
ufh = { version = "0.1.0", path = "../lib" }
62 cli/src/cli.rs (new file)
@@ -0,0 +1,62 @@
use clap::{Parser, Subcommand};
use ufh::item::ItemRef;
use std::path::PathBuf;

#[derive(Debug, Parser)]
#[command(name = "ufh", version = "0.1.0", about = "cli to interact with a ufh server")]
pub struct Command {
    #[arg(short, long, help = "path to config file")]
    pub config: Option<PathBuf>,
    #[arg(short, long, help = "which profile to use")]
    pub profile: Option<String>,
    #[command(subcommand)]
    pub action: Action,
}

#[derive(Debug, Subcommand)]
pub enum Action {
    /// list events
    #[command(name = "ls")]
    List {
        #[arg(short, long, help = "the event types to list")]
        types: Vec<String>,
        #[arg(short = 'g', long, help = "the tags to list")]
        tags: Vec<String>,
        #[arg(short, long, help = "list files with extra detail")]
        long: bool,
        #[arg(short, long, help = "list events as they're created")]
        stream: bool,
    },
    /// upload a file
    Put {
        #[arg(short, long, help = "what tags to set")]
        tags: Vec<String>,
        file: PathBuf,
    },
    /// download a file
    Cat {
        #[arg(name = "ref")]
        item_ref: ItemRef,
    },
    /// add a tag to an event
    Tag {
        #[arg(short, long, required = true, name = "tag", help = "what tags to set")]
        tags: Vec<String>,
        #[arg(name = "ref")]
        item_ref: ItemRef,
    },
    #[command(name = "rm")]
    /// remove an event
    Redact {
        #[arg(name = "ref")]
        item_ref: ItemRef,
    },
    /// get info about a ref
    Info {
        #[arg(short, long, help = "what relations to fetch")]
        rels: Vec<String>,
        #[arg(name = "ref")]
        item_ref: ItemRef,
    },
}
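Not part of the commit: a minimal sketch of how the clap definition above could be exercised from a test inside the cli crate, assuming Command and Action are reachable via the mod cli; declared in main.rs below.

// sketch only (not in the diff): drive the derive-based parser with parse_from
#[cfg(test)]
mod cli_args_test {
    use clap::Parser;
    use crate::cli::{Action, Command};

    #[test]
    fn parses_ls_with_flags() {
        let cmd = Command::parse_from(["ufh", "ls", "--long", "--types", "x.file"]);
        match cmd.action {
            Action::List { types, long, .. } => {
                assert!(long);
                assert_eq!(types, vec!["x.file".to_string()]);
            }
            other => panic!("expected Action::List, got {other:?}"),
        }
    }
}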
208 cli/src/main.rs (new file)
@@ -0,0 +1,208 @@
#![feature(fs_try_exists)]
|
||||
|
||||
// TODO: proper error handling
|
||||
// TODO: redo config file to fit better
|
||||
|
||||
mod cli;
|
||||
mod net;
|
||||
|
||||
use std::{collections::HashMap, io::Write};
|
||||
|
||||
use cli::{Command, Action};
|
||||
use clap::Parser;
|
||||
use serde::{Serialize, Deserialize, Deserializer, Serializer};
|
||||
use net::Item;
|
||||
use ufh::{query::{Query, QueryRelation}, actor::{ActorSecret, ActorId}, event::{WipEvent, EventContent, FileEvent, TagEvent, RedactEvent, RelInfo}};
|
||||
|
||||
pub type Error = Box<dyn std::error::Error>;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Config {
|
||||
profiles: HashMap<String, ConfigProfile>,
|
||||
default: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct ConfigProfile {
|
||||
base_url: String,
|
||||
#[serde(deserialize_with = "import_from_string", serialize_with = "export_to_string")]
|
||||
key: ActorSecret,
|
||||
}
|
||||
|
||||
fn import_from_string<'de, D>(d: D) -> Result<ActorSecret, D::Error>
|
||||
where D: Deserializer<'de>
|
||||
{
|
||||
let s = String::deserialize(d)?;
|
||||
ActorSecret::import_from_string(s).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
fn export_to_string<S: Serializer>(me: &ActorSecret, s: S) -> Result<S::Ok, S::Error> {
|
||||
me.export_to_string().serialize(s)
|
||||
}
|
||||
|
||||
fn get_or_init_config(file: Option<&std::path::Path>, profile: Option<&str>) -> Result<ConfigProfile, Error> {
|
||||
let path = file
|
||||
.map(|f| f.to_path_buf())
|
||||
.unwrap_or_else(|| dirs::config_dir()
|
||||
.expect("should have config dir")
|
||||
.join("ufh/config.json"));
|
||||
if std::fs::try_exists(&path)? {
|
||||
let mut config: Config = serde_json::from_slice(&std::fs::read(&path)?)?;
|
||||
let profile = config.profiles.remove(profile.unwrap_or(&config.default))
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt find that profile"))?;
|
||||
Ok(profile)
|
||||
} else {
|
||||
let mut config = Config {
|
||||
profiles: HashMap::from([("default".to_owned(), ConfigProfile {
|
||||
base_url: "http://localhost:3210/".to_owned(),
|
||||
key: ActorId::new().1,
|
||||
})]),
|
||||
default: "default".to_owned(),
|
||||
};
|
||||
std::fs::write(&path, serde_json::to_string(&config)?)?;
|
||||
let profile = config.profiles.remove(profile.unwrap_or(&config.default))
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt find that profile"))?;
|
||||
Ok(profile)
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Error> {
|
||||
let args: Command = Command::parse();
|
||||
let config = get_or_init_config(args.config.as_deref(), args.profile.as_deref())?;
|
||||
match args.action {
|
||||
Action::Info { rels, item_ref } => {
|
||||
if rels.is_empty() {
|
||||
match net::get(&item_ref).await? {
|
||||
Item::Event(event) => println!("{}", serde_json::to_string(&event)?),
|
||||
Item::Blob(blob) => println!("blob ({} bytes)", blob.len()),
|
||||
};
|
||||
} else {
|
||||
let query = Query {
|
||||
refs: Some(vec![item_ref]),
|
||||
senders: Some(vec![config.key.get_id()]),
|
||||
types: None,
|
||||
tags: None,
|
||||
relations: rels.into_iter().map(QueryRelation::Any).collect(),
|
||||
};
|
||||
let query = net::query(&query).await?;
|
||||
let result = net::list(&query, Some(1), None, None).await?;
|
||||
for event in result.events {
|
||||
println!("{}", serde_json::to_string(&event)?);
|
||||
}
|
||||
for event in result.relations.unwrap_or_default().values() {
|
||||
println!("{}", serde_json::to_string(&event)?);
|
||||
}
|
||||
}
|
||||
},
|
||||
Action::List { types, tags, long, stream } => {
|
||||
let query = Query {
|
||||
refs: None,
|
||||
senders: Some(vec![config.key.get_id()]),
|
||||
types: if types.is_empty() { None } else { Some(types) },
|
||||
tags: if tags.is_empty() { None } else { Some(tags) },
|
||||
relations: Vec::new(),
|
||||
};
|
||||
let query = net::query(&query).await?;
|
||||
let timeout = if stream { Some(30000) } else { None };
|
||||
let mut after = None;
|
||||
if long {
|
||||
println!("{:63} {:12} {:20} {}", "ref", "type", "date", "sender");
|
||||
}
|
||||
loop {
|
||||
let items = net::list(&query, None, after, timeout).await?;
|
||||
for event in items.events {
|
||||
if long {
|
||||
use colored::Colorize;
|
||||
let time = std::time::SystemTime::UNIX_EPOCH + std::time::Duration::from_millis(event.origin_ts);
|
||||
println!("{} {:12} {} {}",
|
||||
&event.id.expect("always has event id"),
|
||||
event.content.get_type().blue(),
|
||||
humantime::format_rfc3339_seconds(time).to_string().magenta(),
|
||||
event.sender.to_string().green(),
|
||||
);
|
||||
} else {
|
||||
println!("{}", &event.id.expect("always has event id"));
|
||||
}
|
||||
};
|
||||
if items.next.is_none() {
|
||||
break;
|
||||
}
|
||||
after = items.next;
|
||||
}
|
||||
},
|
||||
Action::Put { tags, file } => {
|
||||
let buffer = std::fs::read(&file)?;
|
||||
let mut refs = Vec::new();
|
||||
for chunk in buffer.chunks(1024 * 1024) {
|
||||
let bytes = bytes::Bytes::copy_from_slice(chunk);
|
||||
let item_ref = net::put(&Item::Blob(bytes)).await?;
|
||||
println!("upload {item_ref} ({} bytes)", chunk.len());
|
||||
refs.push(item_ref);
|
||||
}
|
||||
let content = EventContent::File(FileEvent {
|
||||
chunks: refs,
|
||||
name: file.to_str().map(|i| i.to_owned()),
|
||||
});
|
||||
|
||||
let mut wip = WipEvent::new(content, &config.key.get_id());
|
||||
wip.signature = Some(config.key.sign(wip.to_json().as_bytes()));
|
||||
let event = wip.into_event()
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt build an event"))?;
|
||||
let item_ref = net::put(&Item::Event(event)).await?;
|
||||
println!("fully uploaded to: {item_ref}");
|
||||
|
||||
if !tags.is_empty() {
|
||||
let content = EventContent::LocalTag(TagEvent { tags });
|
||||
let mut wip = WipEvent::new(content, &config.key.get_id());
|
||||
wip.relations = Some(HashMap::from([
|
||||
(item_ref, RelInfo { rel_type: "tag".to_owned(), key: None }),
|
||||
]));
|
||||
wip.signature = Some(config.key.sign(wip.to_json().as_bytes()));
|
||||
let event = wip.into_event()
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt build an event"))?;
|
||||
let tag_ref = net::put(&Item::Event(event)).await?;
|
||||
println!("tagged with: {tag_ref}");
|
||||
}
|
||||
},
|
||||
Action::Cat { item_ref } => {
|
||||
let Item::Event(event) = net::get(&item_ref).await? else {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt build an event"))?;
|
||||
};
|
||||
let EventContent::File(content) = event.content else {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "not a file event"))?;
|
||||
};
|
||||
for chunk in content.chunks {
|
||||
let Item::Blob(blob) = net::get(&chunk).await? else {
|
||||
return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "not a blob"))?;
|
||||
};
|
||||
std::io::stdout().write_all(&blob)?;
|
||||
}
|
||||
},
|
||||
Action::Tag { tags, item_ref } => {
|
||||
let content = EventContent::LocalTag(TagEvent { tags });
|
||||
let mut wip = WipEvent::new(content, &config.key.get_id());
|
||||
wip.relations = Some(HashMap::from([
|
||||
(item_ref, RelInfo { rel_type: "tag".to_owned(), key: None }),
|
||||
]));
|
||||
wip.signature = Some(config.key.sign(wip.to_json().as_bytes()));
|
||||
let event = wip.into_event()
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt build an event"))?;
|
||||
let tag_ref = net::put(&Item::Event(event)).await?;
|
||||
println!("tagged with: {tag_ref}");
|
||||
},
|
||||
Action::Redact { item_ref } => {
|
||||
let content = EventContent::Redact(RedactEvent {});
|
||||
let mut wip = WipEvent::new(content, &config.key.get_id());
|
||||
wip.relations = Some(HashMap::from([
|
||||
(item_ref, RelInfo { rel_type: "redact".to_owned(), key: None }),
|
||||
]));
|
||||
wip.signature = Some(config.key.sign(wip.to_json().as_bytes()));
|
||||
let event = wip.into_event()
|
||||
.ok_or_else(|| std::io::Error::new(std::io::ErrorKind::NotFound, "couldnt build an event"))?;
|
||||
let redact_ref = net::put(&Item::Event(event)).await?;
|
||||
println!("tagged with: {redact_ref}");
|
||||
},
|
||||
};
|
||||
Ok(())
|
||||
}
|
110 cli/src/net.rs (new file)
@@ -0,0 +1,110 @@
use reqwest::{Client, StatusCode};
use std::collections::HashMap;
use ufh::item::ItemRef;
use bytes::Bytes;
use once_cell::sync::Lazy;
use ufh::event::Event;
use serde::Deserialize;
use crate::Error;

static CLIENT: Lazy<Client> = Lazy::new(Client::new);

#[derive(Debug, Deserialize)]
struct UploadResponse {
    #[serde(rename = "ref")]
    item_ref: ItemRef,
}

#[derive(Debug, Deserialize)]
struct UploadError {
    error: String,
}

#[derive(Debug, Deserialize)]
struct QueryResponse {
    query: String,
}

#[derive(Debug, Deserialize)]
pub struct ListResponse {
    pub events: Vec<Event>,
    pub relations: Option<HashMap<ItemRef, Event>>,
    pub next: Option<String>,
}

#[allow(clippy::large_enum_variant)] // Event is large, but Item::Blob will probably be big as well
pub enum Item {
    Event(Event),
    Blob(Bytes),
}

pub async fn put(item: &Item) -> Result<ItemRef, Error> {
    let buffer = match item {
        Item::Event(event) => Bytes::from(json_canon::to_string(event)?),
        Item::Blob(blob) => blob.clone(),
    };
    let content_type = match item {
        Item::Event(_) => "application/json",
        Item::Blob(_) => "application/octet-stream",
    };
    let res = CLIENT
        .post("http://localhost:3210/things")
        .header("content-length", buffer.len())
        .header("content-type", content_type)
        .body(buffer)
        .send()
        .await?;
    if res.status() != StatusCode::CREATED {
        let json: UploadError = res.json().await?;
        return Err(Box::new(std::io::Error::new(std::io::ErrorKind::Other, json.error)));
    }
    let json: UploadResponse = res
        .json()
        .await?;
    Ok(json.item_ref)
}

pub async fn get(item: &ItemRef) -> Result<Item, Error> {
    let req = CLIENT
        .get(format!("http://localhost:3210/things/{}", item))
        .send()
        .await?;
    match req.headers().get("content-type").map(|i| i.to_str()) {
        Some(Ok("application/json")) => {
            Ok(Item::Event(req.json().await?))
        },
        Some(Ok("application/octet-stream")) => {
            Ok(Item::Blob(req.bytes().await?))
        },
        Some(Ok(_)) => unreachable!(),
        Some(Err(err)) => Err(Box::new(err)),
        _ => Err(Box::new(std::io::Error::new(std::io::ErrorKind::Other, "something went wrong"))),
    }
}

pub async fn query(query: &ufh::query::Query) -> Result<String, Error> {
    let res: QueryResponse = CLIENT
        .post("http://localhost:3210/things/query")
        .header("content-type", "application/json")
        .body(serde_json::to_string(query)?)
        .send()
        .await?
        .json()
        .await?;
    Ok(res.query)
}

pub async fn list(query: &str, limit: Option<u32>, after: Option<String>, timeout: Option<u32>) -> Result<ListResponse, Error> {
    let mut params = Vec::from([("query", query.to_string())]);
    if let Some(limit) = limit { params.push(("limit", limit.to_string())) };
    if let Some(after) = after { params.push(("after", after)) };
    if let Some(timeout) = timeout { params.push(("timeout", timeout.to_string())) };
    let res = CLIENT
        .get("http://localhost:3210/things")
        .query(&params)
        .send()
        .await?
        .json()
        .await?;
    Ok(res)
}
298 client.ts
@@ -1,298 +0,0 @@
#!/usr/bin/env -S deno run -A
|
||||
|
||||
import * as ed25519 from "npm:@noble/ed25519";
|
||||
import path from "npm:path";
|
||||
import { Command } from "npm:commander";
|
||||
import * as base64Normal from "npm:uint8-to-base64";
|
||||
import canonicalize from "npm:canonicalize";
|
||||
import ogs from "npm:open-graph-scraper";
|
||||
|
||||
// shim until i write a proper client in rust (:rocket:)
|
||||
// TODO: rewrite this in rust
|
||||
|
||||
const CONFIG_FILE = path.join(Deno.env.get("HOME"), ".config/ufh/config.json");
|
||||
await Deno.mkdir(path.dirname(CONFIG_FILE), { recursive: true });
|
||||
|
||||
const config = await Deno.readTextFile(CONFIG_FILE)
|
||||
.then((text) => {
|
||||
try {
|
||||
const config = JSON.parse(text);
|
||||
const profile = config.profiles[config.default];
|
||||
return { ...profile, key: ed25519.etc.hexToBytes(profile.key) };
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
Deno.exit(1);
|
||||
}
|
||||
})
|
||||
.catch(async () => {
|
||||
const config = {
|
||||
profiles: {
|
||||
default: {
|
||||
key: ed25519.etc.bytesToHex(ed25519.utils.randomPrivateKey()),
|
||||
baseUrl: "http://localhost:3210/",
|
||||
}
|
||||
},
|
||||
default: "default",
|
||||
};
|
||||
await Deno.writeTextFile(CONFIG_FILE, JSON.stringify(config));
|
||||
return config.profiles[config.default];
|
||||
});
|
||||
|
||||
interface Query {
|
||||
refs?: Array<string>,
|
||||
types?: Array<string>,
|
||||
tags?: Array<string>,
|
||||
relations?: Array<string | [string, string]>,
|
||||
}
|
||||
|
||||
type Relations = Record<string, { type: string, key?: string }>;
|
||||
|
||||
interface Event {
|
||||
type: string,
|
||||
content: any,
|
||||
sender: string,
|
||||
signature: string,
|
||||
origin_ts: number,
|
||||
relations?: Relations,
|
||||
}
|
||||
|
||||
const api = {
|
||||
baseUrl: config.baseUrl,
|
||||
async upload(blob: ArrayBuffer | Event): Promise<string> {
|
||||
const isRaw = blob instanceof ArrayBuffer;
|
||||
const req = await fetch(this.baseUrl + "things", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"content-type": isRaw ? "application/octet-stream" : "application/json",
|
||||
},
|
||||
body: isRaw ? blob : JSON.stringify(blob),
|
||||
});
|
||||
if (req.status !== 201) {
|
||||
console.log(await req.text());
|
||||
throw new Error("failed to upload: " + req.statusText);
|
||||
}
|
||||
const { ref } = await req.json();
|
||||
return ref;
|
||||
},
|
||||
async fetch(ref: string): Promise<ArrayBuffer | Event> {
|
||||
const req = await fetch(this.baseUrl + "things/" + ref);
|
||||
const type = req.headers.get("content-type");
|
||||
if (req.status === 404) throw new Error("doesnt exist");
|
||||
if (req.status !== 200) throw new Error("failed to fetch");
|
||||
if (type === "application/octet-stream") return req.arrayBuffer();
|
||||
if (type === "application/json") return req.json();
|
||||
throw new Error("invalid response type");
|
||||
},
|
||||
query(query: Query, stream = false): { events: AsyncGenerator<Event>, relations: Map<string, Event>, stop: () => void } {
|
||||
const self = this;
|
||||
let after: string;
|
||||
let pagination: string;
|
||||
let stopped = false;
|
||||
let stopSig = new AbortController();
|
||||
const relations = new Map();
|
||||
|
||||
async function getPagination(): Promise<string> {
|
||||
if (pagination) return pagination;
|
||||
const res = await fetch(self.baseUrl + "things/query", {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify(query),
|
||||
}).then(res => res.json());
|
||||
pagination = res.query;
|
||||
return pagination;
|
||||
}
|
||||
|
||||
async function *paginateEvents(): AsyncGenerator<Event> {
|
||||
while (true) {
|
||||
const url = self.baseUrl + "things?query=" + (await getPagination()) + (after ? "&after=" + after : "") + (stream ? "&timeout=30000" : "");
|
||||
const req = await fetch(url, { signal: stopSig.signal }).catch((err) => {
|
||||
console.log(err);
|
||||
return { text: () => "thing" } as any;
|
||||
});
|
||||
if (req.status !== 200) throw new Error("failed to query: " + await req.text());
|
||||
const json = await req.json();
|
||||
for (let relId in json.relations) relations.set(relId, json.relations[relId]);
|
||||
for (let event of json.events) yield event;
|
||||
if (json.next) {
|
||||
after = json.next;
|
||||
} else if (stopped || !stream) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
events: paginateEvents(),
|
||||
relations,
|
||||
stop() {
|
||||
if (stopped) return;
|
||||
stopped = true;
|
||||
stopSig.abort();
|
||||
}
|
||||
}
|
||||
},
|
||||
async makeEvent(type: string, content: any, relations?: Relations): Promise<Event> {
|
||||
const sender = "%" + base64.encode(await ed25519.getPublicKeyAsync(config.key));
|
||||
const event = { type, content, sender, origin_ts: Date.now() } as any;
|
||||
if (relations) event.relations = relations;
|
||||
const encoder = new TextEncoder();
|
||||
event.signature = base64.encode(await ed25519.signAsync(encoder.encode(canonicalize(event)), config.key));
|
||||
return event;
|
||||
}
|
||||
};
|
||||
|
||||
const base64 = {
|
||||
encode(data: Uint8Array): string {
|
||||
return base64Normal
|
||||
.encode(data)
|
||||
.replace(/\+/g, "-")
|
||||
.replace(/\//g, "_")
|
||||
.replace(/=/g, "")
|
||||
},
|
||||
decode(data: string): Uint8Array {
|
||||
return base64Normal
|
||||
.decode(data)
|
||||
.replace(/\-/g, "+")
|
||||
.replace(/_/g, "/")
|
||||
},
|
||||
}
|
||||
|
||||
async function uploadFile(buffer: ArrayBuffer, name?: string) {
|
||||
const refs: Array<string> = [];
|
||||
const mebibyte = 1024 * 1024;
|
||||
for (let chunk = 0; chunk < buffer.byteLength; chunk += mebibyte) {
|
||||
const slice = buffer.slice(chunk, chunk + mebibyte);
|
||||
const ref = await api.upload(slice);
|
||||
console.log(`upload ${ref} (${slice.byteLength} bytes)`);
|
||||
refs.push(ref);
|
||||
}
|
||||
const event = await api.makeEvent("x.file", { chunks: refs, name: name ?? null });
|
||||
const ref = await api.upload(event);
|
||||
return ref;
|
||||
}
|
||||
|
||||
const args = new Command();
|
||||
|
||||
args
|
||||
.name("client.ts")
|
||||
.description("cli to interact with a ufh server")
|
||||
.version("0.1.0");
|
||||
|
||||
args.command("ls")
|
||||
.description("list all files")
|
||||
.option("-t, --types [types...]", "the event types to list")
|
||||
.option("-g, --tags [tags...]", "the tags to list")
|
||||
.option("-l, --long", "list files with extra detail")
|
||||
.option("-s, --stream", "stream events as they're uploaded")
|
||||
.action(async (opts) => {
|
||||
const query = api.query({ types: opts.types ? opts.types : ["x.file", "l.url"], tags: opts.tags }, opts.stream);
|
||||
if (opts.long) {
|
||||
console.log(`${"ref".padEnd(63)} ${"type".padEnd(8)} ${"date".padEnd(26)} sender`);
|
||||
}
|
||||
for await (let event of query.events) {
|
||||
const eventId = event.id;
|
||||
if (opts.long) {
|
||||
console.log(
|
||||
`${eventId} %c${event.type.padEnd(8)} %c${new Date(event.origin_ts).toISOString().padEnd(26)} %c${event.sender}`,
|
||||
"color: blue", "color: magenta", "color: green",
|
||||
);
|
||||
} else {
|
||||
console.log(eventId);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
args.command("put")
|
||||
.description("upload a file")
|
||||
.option("-t, --tags [tags...]", "what tags to set")
|
||||
.argument("<file>", "the file to upload")
|
||||
.action(async (file: string, opts) => {
|
||||
const content = await Deno.readFile(file);
|
||||
const ref = await uploadFile(content.buffer, path.basename(file));
|
||||
console.log(`fully uploaded to: ${ref}`);
|
||||
if (opts.tags) {
|
||||
const event = await api.makeEvent("x.tag.local", { tags: opts.tags }, { [ref]: { type: "tag" } });
|
||||
const uploadedRef = await api.upload(event);
|
||||
console.log(`tagged with ${uploadedRef}`);
|
||||
}
|
||||
});
|
||||
|
||||
args.command("url")
|
||||
.description("save a url")
|
||||
.argument("<url>", "the url to save")
|
||||
.action(async (url: string) => {
|
||||
const { result: page } = await ogs({ url });
|
||||
if (!page.success) throw new Error("failed to fetch page");
|
||||
const info = {
|
||||
url,
|
||||
title: page.ogTitle ?? new URL(url).hostname,
|
||||
description: page.ogDescription ?? null,
|
||||
};
|
||||
const rels: Relations = {};
|
||||
if (page.ogImage?.[0]) {
|
||||
const imgUrl = new URL(page.ogImage?.[0].url, url);
|
||||
const imgBuf = await fetch(imgUrl).then((res) => res.arrayBuffer());
|
||||
const imgRef = await uploadFile(imgBuf);
|
||||
rels[imgRef] = { type: "thumbnail" };
|
||||
console.log(`thumbnail uploaded to: ${imgRef}`);
|
||||
}
|
||||
const event = await api.makeEvent("l.url", info, rels);
|
||||
const ref = await api.upload(event);
|
||||
console.log(`fully uploaded to: ${ref}`);
|
||||
Deno.exit(0); // doesn't seem to exit on its own?
|
||||
});
|
||||
|
||||
args.command("cat")
|
||||
.description("download a file")
|
||||
.argument("<ref>", "the files to download")
|
||||
.action(async (ref: string) => {
|
||||
const event = await api.fetch(ref) as Event;
|
||||
if (event.type !== "x.file") throw new Error("not a file");
|
||||
for (let chunk of event.content.chunks ?? []) {
|
||||
const data = await api.fetch(chunk);
|
||||
if (!(data instanceof ArrayBuffer)) throw new Error("invalid blob");
|
||||
Deno.stdout.write(data);
|
||||
}
|
||||
});
|
||||
|
||||
args.command("tag")
|
||||
.description("tag something")
|
||||
.option("-t, --tags [tags...]", "what tags to set")
|
||||
.argument("<ref>", "the thing to tag")
|
||||
.action(async (ref: string, opts: any) => {
|
||||
const event = await api.makeEvent("x.tag.local", { tags: opts.tags }, { [ref]: { type: "tag" } });
|
||||
const uploadedRef = await api.upload(event);
|
||||
console.log(`tagged with ${uploadedRef}`);
|
||||
});
|
||||
|
||||
args.command("rm")
|
||||
.description("remove a thing")
|
||||
.argument("<ref>", "the thing to remove")
|
||||
.action(async (targetRef: string) => {
|
||||
const redactEvent = await api.makeEvent("x.redact", {}, {
|
||||
[targetRef]: { type: "redact" },
|
||||
});
|
||||
const redactRef = await api.upload(redactEvent);
|
||||
console.log(`redacted with: ${redactRef}`);
|
||||
});
|
||||
|
||||
args.command("info")
|
||||
.description("get info about a ref")
|
||||
.option("-r, --rels [types...]", "which relation types to fetch")
|
||||
.argument("<ref>", "the ref to inspect")
|
||||
.action(async (ref, opts) => {
|
||||
if (opts.rels) {
|
||||
const query = api.query({ refs: [ref], relations: opts.rels });
|
||||
for await (let event of query.events) {
|
||||
console.log(event);
|
||||
for (let [_, relEvent] of query.relations) {
|
||||
console.log(relEvent);
|
||||
}
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
console.log(await api.fetch(ref));
|
||||
}
|
||||
});
|
||||
|
||||
args.parse(Deno.args, { from: "user" });
|
|
@@ -6,5 +6,10 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
base64 = "0.21.2"
ed25519-dalek = "1.0.1"
json-canon = "0.1.3"
rand = "^0.7"
serde = { version = "1.0.164", features = ["derive"] }
serde_json = "1.0.100"
thiserror = "1.0.40"

@@ -2,7 +2,7 @@ use std::{str::FromStr, fmt::{Display, Debug}};
use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD as b64engine;
use thiserror::Error;
use ed25519_dalek::{Keypair, PublicKey, Signature, Signer, Verifier};
use ed25519_dalek::{Keypair, PublicKey, Signature, Signer, Verifier, SecretKey};
use serde::{Serialize, Deserialize};
// use super::hostname::{Hostname, HostnameParseError};

@@ -23,6 +23,12 @@ pub enum ActorIdParseError {
    #[error("incorrect byte count (should be 32)")] IncorrectByteCount,
}

#[derive(Debug, PartialEq, Eq, Clone, Error)]
pub enum ActorSecretParseError {
    #[error("invalid base64")] InvalidBase64,
    #[error("incorrect byte count (should be 32)")] IncorrectByteCount,
}

impl ActorId {
    pub fn new() -> (ActorId, ActorSecret) {
        let mut rng = rand::thread_rng();

@@ -38,9 +44,28 @@ impl ActorId {
}

impl ActorSecret {
    pub fn sign(&self, hash: &[u8]) -> Signature {
    pub fn sign(&self, hash: &[u8]) -> ActorSignature {
        let secret = Keypair::from_bytes(&self.0).expect("can only generate valid keys");
        secret.sign(hash)
        ActorSignature(secret.sign(hash).to_bytes())
    }

    pub fn get_id(&self) -> ActorId {
        let pair = Keypair::from_bytes(&self.0).expect("can only generate valid keys");
        ActorId(pair.public.to_bytes())
    }

    // unsure about this api
    pub fn export_to_string(&self) -> String {
        let pair = Keypair::from_bytes(&self.0).expect("can only generate valid keys");
        b64engine.encode(pair.secret.to_bytes())
    }

    pub fn import_from_string(s: String) -> Result<Self, ActorSecretParseError> {
        let key = b64engine.decode(s).map_err(|_| ActorSecretParseError::InvalidBase64)?;
        let secret = SecretKey::from_bytes(&key).map_err(|_| ActorSecretParseError::IncorrectByteCount)?;
        let public= PublicKey::from(&secret);
        let keypair = Keypair { secret, public };
        Ok(Self(keypair.to_bytes()))
    }
}
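Not part of the diff: a small sketch of how the new export/import pair could round-trip a secret, assuming the ufh::actor module layout shown above.

// sketch only (not in the diff): export a secret key and re-import it
use ufh::actor::{ActorId, ActorSecret};

fn main() {
    let (id, secret) = ActorId::new();
    let exported = secret.export_to_string();
    let restored = ActorSecret::import_from_string(exported).expect("round-trips");
    // both secrets should resolve to the same public actor id
    assert_eq!(restored.get_id().to_string(), id.to_string());
}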
29 lib/src/derived.rs (new file)
@@ -0,0 +1,29 @@
use serde::{Serialize, Deserialize};

#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct Derived {
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub file: Option<DeriveFile>,
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub tags: Vec<String>,
    // #[serde(skip_serializing_if = "HashMap::is_empty")]
    // pub refs: HashMap<ItemRef, Event>
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DeriveFile {
    pub size: u64,
    pub mime: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub height: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub width: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<u64>,
}

impl Derived {
    pub fn is_empty(&self) -> bool {
        self.file.is_none() && self.tags.is_empty()
    }
}
@@ -1,9 +1,9 @@
use std::collections::HashMap;
use serde::{Serialize, Deserialize};
use serde_json::Value;
use ufh::ItemRef;
use crate::item::ItemRef;
use crate::actor::{ActorId, ActorSignature};
use crate::derive::Derived;
use crate::derived::Derived;

// TODO (future): stabilize move to the library (ufh)
// TODO (future, maybe?): also come up with a better name than ufh

@@ -17,6 +17,7 @@ pub struct Event {
    pub content: EventContent,
    #[serde(skip_serializing_if = "HashMap::is_empty", default)]
    pub relations: HashMap<ItemRef, RelInfo>,
    #[serde(skip_serializing_if = "Derived::is_empty")]
    pub derived: Derived,
    pub sender: ActorId,
    pub signature: ActorSignature,

@@ -24,6 +25,7 @@
}

/// an event in the process of being created or verified
// TODO: this looks like a builder, maybe refactor it into one?
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct WipEvent {
    #[serde(flatten, serialize_with = "serialize_event_content", deserialize_with = "deserialize_event_content")]

@@ -31,7 +33,7 @@ pub struct WipEvent {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub relations: Option<HashMap<ItemRef, RelInfo>>,
    #[serde(skip)]
    pub derived: Derived,
    pub derived: Option<Derived>,
    pub sender: ActorId,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub signature: Option<ActorSignature>,

@@ -146,18 +148,34 @@ fn serialize_event_content<S>(content: &EventContent, serializer: S) -> Result<S
// }

impl WipEvent {
    pub fn new(content: EventContent, sender: &ActorId) -> WipEvent {
        Self {
            content,
            relations: None,
            derived: None,
            sender: sender.clone(),
            signature: None,
            // ugly code galore
            origin_ts: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .expect("we're not remotely near the 1970s anymore")
                .as_millis() as u64,
        }
    }

    pub fn to_json(&self) -> String {
        json_canon::to_string(&self).expect("can always be serialized")
    }

    // pub fn into_event(&self) -> Option<Event> {
    //     Some(Event {
    //         content: self.content,
    //         relations: self.relations,
    //         derived: HashMap::new(),
    //         sender: self.sender,
    //         signature: self.signature,
    //         origin_ts: self.origin_ts,
    //     })
    // }
    pub fn into_event(self) -> Option<Event> {
        Some(Event {
            id: None,
            content: self.content,
            relations: self.relations.unwrap_or_default(),
            derived: self.derived.unwrap_or_default(),
            sender: self.sender,
            signature: self.signature?,
            origin_ts: self.origin_ts,
        })
    }
}
104 lib/src/item.rs (new file)
@@ -0,0 +1,104 @@
use std::{fmt::Display, str::FromStr};
|
||||
use serde::{Serialize, Deserialize};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(try_from = "&str", into = "String")]
|
||||
pub enum HashType {
|
||||
Sha224,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ItemRefParseError {
|
||||
#[error("missing hash type")] MissingHashType,
|
||||
#[error("unknown hash type")] UnknownHashType,
|
||||
#[error("missing hash")] MissingHash,
|
||||
#[error("invalid hash char")] InvalidHashChar,
|
||||
#[error("invalid hash length")] InvalidHashLength,
|
||||
#[error("unknown ref kind")] ExtraData,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(try_from = "&str", into = "String")]
|
||||
pub struct ItemRef(pub HashType, pub String);
|
||||
|
||||
impl TryFrom<&str> for ItemRef {
|
||||
type Error = ItemRefParseError;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
let mut split = value.split('-');
|
||||
let hash_type_str = split.next().ok_or(ItemRefParseError::MissingHashType)?;
|
||||
let hash_type = HashType::try_from(hash_type_str)?;
|
||||
let hash = split.next().ok_or(ItemRefParseError::MissingHash)?;
|
||||
if split.next().is_some() {
|
||||
return Err(ItemRefParseError::ExtraData);
|
||||
}
|
||||
if hash.find(|c: char| !c.is_ascii_hexdigit()).is_some() {
|
||||
return Err(ItemRefParseError::InvalidHashChar);
|
||||
}
|
||||
if hash.len() != hash_type.get_hex_len() as usize {
|
||||
return Err(ItemRefParseError::InvalidHashLength);
|
||||
}
|
||||
Ok(Self(hash_type, hash.to_owned()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ItemRef {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}-{}", self.0, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ItemRef> for String {
|
||||
fn from(value: ItemRef) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ItemRef {
|
||||
type Err = ItemRefParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
ItemRef::try_from(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for HashType {
|
||||
type Error = ItemRefParseError;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
"sha224" => Ok(Self::Sha224),
|
||||
"" => Err(ItemRefParseError::MissingHashType),
|
||||
_ => Err(ItemRefParseError::UnknownHashType),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HashType {
|
||||
/// get length of this kind of hash in bytes
|
||||
fn get_len(&self) -> u32 {
|
||||
match self {
|
||||
Self::Sha224 => 28,
|
||||
}
|
||||
}
|
||||
|
||||
/// get length of this kind of hash in hexadecimal
|
||||
fn get_hex_len(&self) -> u32 {
|
||||
self.get_len() * 2
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for HashType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Sha224 => f.write_str("sha224"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HashType> for String {
|
||||
fn from(value: HashType) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
101 lib/src/lib.rs
@@ -1,99 +1,8 @@
use std::fmt::Display;
|
||||
use serde::{Serialize, Deserialize};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(try_from = "&str", into = "String")]
|
||||
pub enum HashType {
|
||||
Sha224,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ItemRefParseError {
|
||||
#[error("missing hash type")] MissingHashType,
|
||||
#[error("unknown hash type")] UnknownHashType,
|
||||
#[error("missing hash")] MissingHash,
|
||||
#[error("invalid hash char")] InvalidHashChar,
|
||||
#[error("invalid hash length")] InvalidHashLength,
|
||||
#[error("unknown ref kind")] ExtraData,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
#[serde(try_from = "&str", into = "String")]
|
||||
pub struct ItemRef(pub HashType, pub String);
|
||||
|
||||
impl TryFrom<&str> for ItemRef {
|
||||
type Error = ItemRefParseError;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
let mut split = value.split('-');
|
||||
let hash_type_str = split.next().ok_or(ItemRefParseError::MissingHashType)?;
|
||||
let hash_type = HashType::try_from(hash_type_str)?;
|
||||
let hash = split.next().ok_or(ItemRefParseError::MissingHash)?;
|
||||
if split.next().is_some() {
|
||||
return Err(ItemRefParseError::ExtraData);
|
||||
}
|
||||
if hash.find(|c: char| !c.is_ascii_hexdigit()).is_some() {
|
||||
return Err(ItemRefParseError::InvalidHashChar);
|
||||
}
|
||||
if hash.len() != hash_type.get_hex_len() as usize {
|
||||
return Err(ItemRefParseError::InvalidHashLength);
|
||||
}
|
||||
Ok(Self(hash_type, hash.to_owned()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ItemRef {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}-{}", self.0, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ItemRef> for String {
|
||||
fn from(value: ItemRef) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for HashType {
|
||||
type Error = ItemRefParseError;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
"sha224" => Ok(Self::Sha224),
|
||||
"" => Err(ItemRefParseError::MissingHashType),
|
||||
_ => Err(ItemRefParseError::UnknownHashType),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HashType {
|
||||
/// get length of this kind of hash in bytes
|
||||
fn get_len(&self) -> u32 {
|
||||
match self {
|
||||
Self::Sha224 => 28,
|
||||
}
|
||||
}
|
||||
|
||||
/// get length of this kind of hash in hexadecimal
|
||||
fn get_hex_len(&self) -> u32 {
|
||||
self.get_len() * 2
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for HashType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Sha224 => f.write_str("sha224"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HashType> for String {
|
||||
fn from(value: HashType) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
pub mod item;
|
||||
pub mod actor;
|
||||
pub mod event;
|
||||
pub mod derived;
|
||||
pub mod query;
|
||||
|
||||
/*
|
||||
#[async_trait::async_trait]
|
||||
|
|
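A minimal usage sketch (not part of this commit) of the ref format the TryFrom<&str> impl above enforces: a hash-type prefix, a '-', then a hex digest whose length must match the type (28 bytes, i.e. 56 hex characters, for sha224). The digest below is a placeholder.

// illustrative sketch, assuming the types now live in ufh::item as the later hunks import them
use ufh::item::{HashType, ItemRef, ItemRefParseError};

fn main() -> Result<(), ItemRefParseError> {
    let raw = format!("sha224-{}", "ab".repeat(28)); // 56 placeholder hex chars
    let item = ItemRef::try_from(raw.as_str())?;
    assert_eq!(item.0, HashType::Sha224);
    assert_eq!(item.to_string(), raw); // Display re-joins as "<type>-<hex>"

    // digests with the wrong length are rejected
    assert!(matches!(
        ItemRef::try_from("sha224-abcd"),
        Err(ItemRefParseError::InvalidHashLength)
    ));
    Ok(())
}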
23 lib/src/query.rs Normal file

@@ -0,0 +1,23 @@
use serde::{Serialize, Deserialize};
use crate::item::ItemRef;
use crate::actor::ActorId;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Query {
    // each filter is logically ANDed together, and each item in each vec is logically ORed
    pub refs: Option<Vec<ItemRef>>,
    pub senders: Option<Vec<ActorId>>, // TODO (security): don't allow reading other people's events by default
    pub types: Option<Vec<String>>,
    pub tags: Option<Vec<String>>,

    // after filtering, relations are fetched
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub relations: Vec<QueryRelation>,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum QueryRelation {
    Any(String), // events with this relation type
    // Typed(String, String), // TODO: events with this relation type and event type
}
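A hedged sketch (not part of the commit) of what a Query built from this new module serializes to; the type and tag strings are placeholders, chosen only to illustrate the AND-of-ORs comment and the untagged QueryRelation encoding.

// illustrative sketch; field layout taken from lib/src/query.rs above, filter values are made up
use ufh::query::{Query, QueryRelation};

fn main() {
    let query = Query {
        refs: None,
        senders: None,
        // type must be "example.file" OR "example.tag" ...
        types: Some(vec!["example.file".into(), "example.tag".into()]),
        // ... AND at least one of these tags must match
        tags: Some(vec!["demo".into()]),
        // Any("comment") serializes as the bare string "comment" because of #[serde(untagged)];
        // an empty vec would be omitted entirely thanks to skip_serializing_if
        relations: vec![QueryRelation::Any("comment".into())],
    };
    println!("{}", serde_json::to_string_pretty(&query).unwrap());
}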
@@ -7,9 +7,7 @@ edition = "2021"

[dependencies]
axum = { version = "0.6.18", features = ["macros", "headers"] }
base64 = "0.21.2"
bytes = "1.4.0"
ed25519-dalek = "1.0.1"
futures-util = "0.3.28"
image = "0.24.6"
infer = "0.14.0"

@@ -17,7 +15,6 @@ json-canon = "0.1.3"
lru = "0.10.1"
nanoid = "0.4.0"
once_cell = "1.18.0"
rand = "^0.7"
reqwest = { version = "0.11.18", features = ["json"] }
serde = { version = "1.0.164", features = ["derive"] }
serde_json = "1.0.97"
@@ -3,7 +3,7 @@ use once_cell::sync::Lazy;
use tokio::sync::RwLock;
use std::{num::NonZeroUsize, ops::Deref};
use lru::LruCache;
use ufh::ItemRef;
use ufh::item::ItemRef;
use bytes::Bytes;
use crate::Error;
@@ -1,5 +1,5 @@
use crate::query::Query;
use crate::events::Event;
use ufh::event::Event;

// TODO (future): abstract the database
@@ -1,29 +1,7 @@
use std::io::Cursor;

use infer::MatcherType;
use serde::{Serialize, Deserialize};

#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct Derived {
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub file: Option<DeriveFile>,
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub tags: Vec<String>,
    // #[serde(skip_serializing_if = "HashMap::is_empty")]
    // pub refs: HashMap<ItemRef, Event>
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DeriveFile {
    pub size: u64,
    pub mime: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub height: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub width: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<u64>,
}
use ufh::derived::DeriveFile;

pub fn derive_file(buffer: &[u8]) -> DeriveFile {
    let Some(mime) = infer::get(buffer) else {
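As an aside, a small sketch (not from this commit) of the content sniffing derive_file builds on: infer::get inspects magic bytes and returns None for data it does not recognize, so the octet-stream fallback here is an assumption of this sketch, not necessarily what derive_file does.

// illustrative sketch of the infer crate call used in derive_file above
fn sniff_mime(buffer: &[u8]) -> String {
    match infer::get(buffer) {
        // e.g. "image/png" for a PNG header
        Some(kind) => kind.mime_type().to_string(),
        // fallback chosen for this sketch only
        None => "application/octet-stream".to_string(),
    }
}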
@@ -11,7 +11,7 @@ pub enum Error {
    #[error("{0}")] Image(image::ImageError),
    #[error("{0}")] Http(StatusCode, Value),
    #[error("{0}")] Validation(&'static str),
    #[error("{0}")] ItemRefParse(ufh::ItemRefParseError),
    #[error("{0}")] ItemRefParse(ufh::item::ItemRefParseError),
    // #[error("{0}")] Unknown(Box<dyn std::error::Error>),
}

@@ -39,8 +39,8 @@ impl From<image::ImageError> for Error {
    }
}

impl From<ufh::ItemRefParseError> for Error {
    fn from(value: ufh::ItemRefParseError) -> Self {
impl From<ufh::item::ItemRefParseError> for Error {
    fn from(value: ufh::item::ItemRefParseError) -> Self {
        Error::ItemRefParse(value)
    }
}
@@ -15,8 +15,6 @@ use tokio::sync::broadcast;

mod error;
mod blobs;
mod events;
mod actor;
mod query;
mod derive;

@@ -25,7 +23,7 @@ mod db;
mod routes;

pub(crate) use error::Error;
use events::Event;
use ufh::event::Event;

const MAX_SIZE: u64 = 1024 * 1024;
@@ -1,24 +1,2 @@
use serde::{Serialize, Deserialize};
use ufh::ItemRef;

use crate::actor::ActorId;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Query {
    // each filter is logically ANDed together, and each item in each vec is logically ORed
    pub refs: Option<Vec<ItemRef>>,
    pub senders: Option<Vec<ActorId>>, // TODO (security): don't allow reading other people's events by default
    pub types: Option<Vec<String>>,
    pub tags: Option<Vec<String>>,

    // after filtering, relations are fetched
    #[serde(default)]
    pub relations: Vec<QueryRelation>,
}

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum QueryRelation {
    Any(String), // events with this relation type
    // Typed(String, String), // TODO: events with this relation type and event type
}
// TODO: remove
pub use ufh::query::{Query, QueryRelation};
@@ -8,7 +8,7 @@ use crate::db::Database;
use crate::derive::derive_file;
use reqwest::StatusCode;
use serde::{Serialize, Deserialize};
use ufh::ItemRef;
use ufh::item::ItemRef;
use std::collections::{HashMap, HashSet};
use std::io::Cursor;
use std::sync::Arc;

@@ -20,7 +20,7 @@ use nanoid::nanoid;
use tokio::sync::broadcast;

pub(crate) use crate::error::Error;
use crate::events::{self, Event, EventContent};
use ufh::event::{self, Event, EventContent};

use crate::ServerState;
use crate::MAX_SIZE;
@@ -69,7 +69,8 @@ async fn thing_create(

    let blob = Bytes::from(chunks.concat());
    let item_ref: ItemRef = if is_event {
        let mut event: events::WipEvent = serde_json::from_slice(&blob)?;
        let mut event: event::WipEvent = serde_json::from_slice(&blob)?;
        event.derived = None;

        // validate the signature
        let Some(sig) = event.signature.take() else {

@@ -85,7 +86,7 @@ async fn thing_create(
            id: None,
            content: event.content,
            relations: event.relations.unwrap_or_default(),
            derived: event.derived,
            derived: event.derived.unwrap_or_default(),
            sender: event.sender,
            signature: event.signature.expect("already validated earlier"),
            origin_ts: event.origin_ts,
@@ -1,16 +1,15 @@
use sha2::Digest;
use std::path::PathBuf;
use ufh::{ItemRef, HashType};
use ufh::item::{ItemRef, HashType};
use tokio::fs;
use flume::Sender;
use tokio::sync::broadcast;
use sqlx::{SqlitePool, query, sqlite::SqliteConnectOptions};
use crate::error::Error;

pub struct FileStore {
    blob_path: PathBuf,
    ref_db: SqlitePool,
    // TODO: long polling
    // pollers: Vec<Sender<ItemRef>>,
    stream: broadcast::Sender<ItemRef>,
}

impl FileStore {
@@ -34,7 +33,7 @@ impl FileStore {
        Ok(Self {
            blob_path,
            ref_db: db,
            // pollers: Vec::new(),
            stream: broadcast::channel(32).0,
        })
    }
@@ -74,7 +73,7 @@ impl FileStore {

    pub async fn list(&mut self, after: Option<ItemRef>, limit: usize, timeout: Option<u64>) -> Result<Vec<ItemRef>, Error> {
        // this code doesn't seem good but works
        use futures_util::TryStreamExt;
        use futures_util::TryStreamExt as _;

        let mut entries = vec![];
        if let Some(after) = after {
@@ -98,19 +97,17 @@ impl FileStore {
            }
        };

        // if let Some(timeout) = timeout {
        //     let timeout = std::time::Duration::from_millis(timeout);
        //     if entries.is_empty() {
        //         let (send, recv) = flume::bounded(1);
        //         self.pollers.push(send);
        //         if let Ok(result) = tokio::time::timeout(timeout, recv.recv_async()).await {
        //             let item = result.map_err(|_| Error::Static("couldnt receive from flume channel"))?;
        //             return Ok(vec![item]);
        //         } else {
        //             return Ok(entries);
        //         }
        //     }
        // }
        if let Some(timeout) = timeout {
            let timeout = std::time::Duration::from_millis(timeout);
            if entries.is_empty() {
                if let Ok(result) = tokio::time::timeout(timeout, self.stream.subscribe().recv()).await {
                    let item = result.map_err(|_| Error::Static("couldnt receive from channel"))?;
                    return Ok(vec![item]);
                } else {
                    return Ok(entries);
                }
            }
        }

        Ok(entries)
    }
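A hedged caller-side sketch (not part of the commit) of the long polling above: when nothing newer than `after` exists yet, list subscribes to the broadcast stream and waits up to the given number of milliseconds for one new ItemRef. The limit of 100 and the 30-second timeout are arbitrary values chosen for illustration.

// illustrative sketch; the list signature is taken from the hunk above
async fn wait_for_new_items(store: &mut FileStore) -> Result<(), Error> {
    // returns immediately when entries already exist, otherwise blocks up to 30s
    let items = store.list(None, 100, Some(30_000)).await?;
    for item in items {
        println!("new item: {item}"); // ItemRef implements Display
    }
    Ok(())
}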
@@ -1,12 +1,11 @@
use axum::extract::{BodyStream, Json, Path, Query, State, TypedHeader};
use axum::headers::ContentLength;
use axum::extract::{BodyStream, Json, Path, Query, State};
use axum::http::StatusCode;
use futures_util::StreamExt;
use serde_json::{Value, json};
use std::sync::Arc;
use tokio::sync::Mutex;

use ufh::ItemRef;
use ufh::item::ItemRef;

mod fs;
mod error;
@@ -50,7 +49,6 @@ struct ListQuery {

async fn blob_upload(
    State(state): State<Arc<UfhState>>,
    TypedHeader(ContentLength(length)): TypedHeader<ContentLength>,
    mut body: BodyStream,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
    let mut chunks = vec![];