forked from mirror/grapevine

Compare commits

15 commits

Author SHA1 Message Date
Benjamin Lee
c78be5e7d7
add changelog entry for complement support 2024-08-04 23:22:23 -07:00
Benjamin Lee
46bc003ba1
kill dangling docker containers in complement run
This is a stupid hack and I hate it, but we do need to support
concurrent complement runs if we want to do this in CI.
2024-08-04 23:22:23 -07:00
Benjamin Lee
67ebbeaa70
write raw log file in complement wrapper 2024-08-04 23:22:23 -07:00
Benjamin Lee
169df56a84
handle cancellation in complement wrapper
What a mess lmao
2024-08-04 23:22:23 -07:00
Benjamin Lee
1b9b227ca8
xtask-specific binaries in GRAPEVINE_XTASK_PATH
Some possible alternatives to this:

 - Keep putting them in PATH.
 - Make xtask a nix derivation. We would lose out on incremental
   compilation this way, and would end up recompiling xtask from scratch
   whenever something in the main package changed.
 - Have xtask call `nix build --inputs-from $toplevel nixpkgs#go` and
   such. Slow and tedious.
2024-08-04 23:22:23 -07:00
Benjamin Lee
6b18dc0570
CLI help for the complement script 2024-08-04 23:22:23 -07:00
Benjamin Lee
c503fd3c3e
compare complement test results to a baseline
One thing that might be neat in the future is noticing differing results
while the tests are still running, and modifying the log messages to
indicate them. I can imagine situations where you would want to abort
the test run immediately after seeing the first regression.
2024-08-04 23:22:23 -07:00
Benjamin Lee
d6fb93743d
write complement per-test logs to files 2024-08-04 23:22:22 -07:00
Benjamin Lee
4eb3c56b98
write complement test result summary to a tsv file 2024-08-04 23:22:22 -07:00
Benjamin Lee
90c195188d
add live progress display to complement wrapper
Added the `derive` feature to the workspace serde dependency here.
Previously, the dependency was only used in the main package, which
ended up enabling the `derive` feature through transitive serde
dependencies. This is not the case for xtask, so we need to enable it
explicitly.
2024-08-04 23:22:22 -07:00
Benjamin Lee
2176a1a190
add complement wrapper xtask script 2024-08-04 23:22:22 -07:00
Benjamin Lee
5a299fef90
allow 'nix build'-specific nix-build-and-cache args
We need --no-link for the complement script.

I picked the syntax for this as an analogy with
'cargo rustc --cargo-args -- --rustc-args', but not sure how I feel
about it.
2024-08-04 23:22:22 -07:00
Benjamin Lee
b765961dca
move dependencies and some package attrs to workspace
This is in preparation for creating a separate `xtask` package.
2024-08-04 23:22:22 -07:00
Benjamin Lee
69e1d2fd7b
add test image for complement
This image should satisfy the requirements described in [1]. openssl
commands were copied from [2].

[1]: https://github.com/matrix-org/complement?tab=readme-ov-file#image-requirements
[2]: https://github.com/matrix-org/complement?tab=readme-ov-file#complement-pki
2024-08-04 23:22:22 -07:00
Benjamin Lee
69d4793b6e
add nix derivation for complement
Alternative to this would be just running 'go test' and pointing it at
the complement source code when we want to do a test run. This would
mean that we can't cache the unit test build, and would have to include
the 'olm' input in the devshell.
2024-08-04 22:17:44 -07:00
18 changed files with 1825 additions and 26 deletions

.cargo/config.toml (new file, 2 lines)

@@ -0,0 +1,2 @@
[alias]
xtask = "run --package xtask --"
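With this alias in place, the wrapper added in this series is invoked through cargo. A rough example invocation, as a sketch only: the `complement` subcommand name and the `--out`/`--baseline` flags are taken from `xtask/src/complement.rs` below, and the output path is hypothetical.

# run the complement wrapper, writing results to an empty output directory
cargo xtask complement --out target/complement-results
# compare against an explicit baseline file instead of the repository default
cargo xtask complement --out target/complement-results --baseline complement-baseline.tsv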

Cargo.lock (generated, 228 lines changed)

@@ -285,6 +285,15 @@ dependencies = [
"rustc-demangle",
]
[[package]]
name = "backtrace-ext"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
dependencies = [
"backtrace",
]
[[package]]
name = "base64"
version = "0.21.7"
@@ -414,6 +423,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "cfg_aliases"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
@@ -476,6 +491,19 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
[[package]]
name = "console"
version = "0.15.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"unicode-width",
"windows-sys 0.52.0",
]
[[package]]
name = "const-oid"
version = "0.9.6"
@@ -633,6 +661,12 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b"
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "enum-as-inner"
version = "0.6.0"
@@ -850,7 +884,7 @@ dependencies = [
"image",
"jsonwebtoken",
"lru-cache",
"nix",
"nix 0.29.0",
"num_cpus",
"once_cell",
"opentelemetry",
@@ -1242,6 +1276,28 @@ dependencies = [
"serde",
]
[[package]]
name = "indicatif"
version = "0.17.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
dependencies = [
"console",
"instant",
"number_prefix",
"portable-atomic",
"unicode-width",
]
[[package]]
name = "instant"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
dependencies = [
"cfg-if",
]
[[package]]
name = "ipconfig"
version = "0.3.2"
@@ -1260,6 +1316,12 @@ version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
[[package]]
name = "is_ci"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
[[package]]
name = "itertools"
version = "0.12.1"
@@ -1476,6 +1538,37 @@ version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
[[package]]
name = "miette"
version = "7.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1"
dependencies = [
"backtrace",
"backtrace-ext",
"cfg-if",
"miette-derive",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"terminal_size",
"textwrap",
"thiserror",
"unicode-width",
]
[[package]]
name = "miette-derive"
version = "7.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "mime"
version = "0.3.17"
@@ -1509,6 +1602,18 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "nix"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.5.0",
"cfg-if",
"cfg_aliases 0.1.1",
"libc",
]
[[package]]
name = "nix"
version = "0.29.0"
@@ -1517,7 +1622,7 @@ checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
dependencies = [
"bitflags 2.5.0",
"cfg-if",
"cfg_aliases",
"cfg_aliases 0.2.1",
"libc",
]
@@ -1585,6 +1690,12 @@ dependencies = [
"libc",
]
[[package]]
name = "number_prefix"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]]
name = "object"
version = "0.32.2"
@@ -1709,6 +1820,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caff54706df99d2a78a5a4e3455ff45448d81ef1bb63c22cd14052ca0e993a3f"
[[package]]
name = "parking_lot"
version = "0.12.3"
@@ -1868,6 +1985,12 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "portable-atomic"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
[[package]]
name = "powerfmt"
version = "0.2.0"
@@ -1898,6 +2021,16 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "process-wrap"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619"
dependencies = [
"indexmap 2.2.6",
"nix 0.28.0",
]
[[package]]
name = "prometheus"
version = "0.13.4"
@@ -2674,6 +2807,16 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signal-hook"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801"
dependencies = [
"libc",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.2"
@@ -2731,6 +2874,12 @@ version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "smawk"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c"
[[package]]
name = "socket2"
version = "0.5.7"
@@ -2794,6 +2943,27 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "supports-color"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9829b314621dfc575df4e409e79f9d6a66a3bd707ab73f23cb4aa3a854ac854f"
dependencies = [
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c0a1e5168041f5f3ff68ff7d95dcb9c8749df29f6e7e89ada40dd4c9de404ee"
[[package]]
name = "supports-unicode"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
[[package]]
name = "syn"
version = "2.0.66"
@@ -2827,6 +2997,17 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "textwrap"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9"
dependencies = [
"smawk",
"unicode-linebreak",
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.61"
@@ -3330,6 +3511,12 @@ version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-normalization"
version = "0.1.23"
@@ -3339,6 +3526,12 @@ dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-width"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
@@ -3720,6 +3913,37 @@ version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546"
[[package]]
name = "xshell"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437"
dependencies = [
"xshell-macros",
]
[[package]]
name = "xshell-macros"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852"
[[package]]
name = "xtask"
version = "0.1.0"
dependencies = [
"clap",
"indicatif",
"miette",
"process-wrap",
"rand",
"serde",
"serde_json",
"signal-hook",
"strum",
"xshell",
]
[[package]]
name = "yap"
version = "0.12.0"


@@ -1,3 +1,11 @@
[workspace]
members = ["xtask"]
[workspace.package]
license = "Apache-2.0"
# See also `rust-toolchain.toml`
rust-version = "1.78.0"
# Keep alphabetically sorted
[workspace.lints.rust]
elided_lifetimes_in_paths = "warn"
@@ -72,21 +80,8 @@ wildcard_dependencies = "warn"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
[package]
name = "grapevine"
description = "A Matrix homeserver written in Rust"
license = "Apache-2.0"
version = "0.1.0"
edition = "2021"
# See also `rust-toolchain.toml`
rust-version = "1.78.0"
[lints]
workspace = true
# Keep sorted
[dependencies]
[workspace.dependencies]
argon2 = "0.5.3"
async-trait = "0.1.80"
axum = { version = "0.7.5", default-features = false, features = ["form", "http1", "http2", "json", "matched-path", "tracing"] }
@@ -103,8 +98,11 @@ http-body-util = "0.1.1"
hyper = "1.3.1"
hyper-util = { version = "0.1.4", features = ["client", "client-legacy", "service"] }
image = { version = "0.25.1", default-features = false, features = ["jpeg", "png", "gif"] }
indicatif = "0.17.8"
jsonwebtoken = "9.3.0"
lru-cache = "0.1.2"
miette = { version = "7.2.0", features = ["fancy"] }
nix = { version = "0.29", features = ["resource"] }
num_cpus = "1.16.0"
once_cell = "1.19.0"
opentelemetry = "0.23.0"
@@ -112,26 +110,28 @@ opentelemetry-jaeger-propagator = "0.2.0"
opentelemetry-otlp = "0.16.0"
opentelemetry-prometheus = "0.16.0"
opentelemetry_sdk = { version = "0.23.0", features = ["rt-tokio"] }
parking_lot = { version = "0.12.3", optional = true }
parking_lot = "0.12.3"
phf = { version = "0.11.2", features = ["macros"] }
process-wrap = { version = "8.0.2", default-features = false, features = ["std", "process-group"] }
prometheus = "0.13.4"
rand = "0.8.5"
regex = "1.10.4"
reqwest = { version = "0.12.4", default-features = false, features = ["http2", "rustls-tls-native-roots", "socks"] }
ring = "0.17.8"
rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"], optional = true }
rocksdb = { package = "rust-rocksdb", version = "0.26.0", features = ["lz4", "multi-threaded-cf", "zstd"] }
ruma = { git = "https://github.com/ruma/ruma", branch = "main", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "server-util", "state-res", "unstable-msc2448", "unstable-msc3575", "unstable-exhaustive-types", "ring-compat", "unstable-unspecified" ] }
rusqlite = { version = "0.31.0", optional = true, features = ["bundled"] }
sd-notify = { version = "0.4.1", optional = true }
serde = { version = "1.0.202", features = ["rc"] }
rusqlite = { version = "0.31.0", features = ["bundled"] }
sd-notify = { version = "0.4.1" }
serde = { version = "1.0.202", features = ["rc", "derive"] }
serde_html_form = "0.2.6"
serde_json = { version = "1.0.117", features = ["raw_value"] }
serde_yaml = "0.9.34"
sha-1 = "0.10.1"
signal-hook = "0.3.17"
strum = { version = "0.26.2", features = ["derive"] }
thiserror = "1.0.61"
thread_local = "1.1.8"
tikv-jemallocator = { version = "0.5.4", features = ["unprefixed_malloc_on_supported_platforms"], optional = true }
tikv-jemallocator = { version = "0.5.4", features = ["unprefixed_malloc_on_supported_platforms"] }
tokio = { version = "1.37.0", features = ["fs", "macros", "signal", "sync"] }
toml = "0.8.14"
tower = { version = "0.4.13", features = ["util"] }
@@ -142,9 +142,79 @@ tracing-opentelemetry = "0.24.0"
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] }
trust-dns-resolver = "0.23.2"
xdg = "2.5.2"
xshell = "0.2.6"
[package]
name = "grapevine"
description = "A Matrix homeserver written in Rust"
license.workspace = true
version = "0.1.0"
edition = "2021"
rust-version.workspace = true
[lints]
workspace = true
# Keep sorted
[dependencies]
argon2.workspace = true
async-trait.workspace = true
axum.workspace = true
axum-extra.workspace = true
axum-server.workspace = true
base64.workspace = true
bytes.workspace = true
clap.workspace = true
futures-util.workspace = true
hmac.workspace = true
html-escape.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
image.workspace = true
jsonwebtoken.workspace = true
lru-cache.workspace = true
num_cpus.workspace = true
once_cell.workspace = true
opentelemetry.workspace = true
opentelemetry-jaeger-propagator.workspace = true
opentelemetry-otlp.workspace = true
opentelemetry-prometheus.workspace = true
opentelemetry_sdk.workspace = true
parking_lot = { workspace = true, optional = true }
phf.workspace = true
prometheus.workspace = true
rand.workspace = true
regex.workspace = true
reqwest.workspace = true
ring.workspace = true
rocksdb = { workspace = true, optional = true }
ruma.workspace = true
rusqlite = { workspace = true, optional = true }
sd-notify = { workspace = true, optional = true }
serde.workspace = true
serde_html_form.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
sha-1.workspace = true
strum.workspace = true
thiserror.workspace = true
thread_local.workspace = true
tikv-jemallocator = { workspace = true, optional = true }
tokio.workspace = true
toml.workspace = true
tower.workspace = true
tower-http.workspace = true
tracing.workspace = true
tracing-flame.workspace = true
tracing-opentelemetry.workspace = true
tracing-subscriber.workspace = true
trust-dns-resolver.workspace = true
xdg.workspace = true
[target.'cfg(unix)'.dependencies]
nix = { version = "0.29", features = ["resource"] }
nix.workspace = true
[features]
default = ["rocksdb", "sqlite", "systemd"]


@@ -5,11 +5,33 @@ set -euo pipefail
toplevel="$(git rev-parse --show-toplevel)"
# Build and cache the specified arguments
#
# Usage: nix-build-and-cache just [args...] -- [build-only-args...]
#
# Arguments after the '--' (build-only-args) will only be passed to the
# 'nix build' command, while arguments before '--' will also be passed to any
# other nix command using the installable list. This is useful for things like
# '--no-link', which are only relevant to 'nix build'.
just() {
build_args=()
args=()
build_only_mode=0
for arg in "$@"; do
if [ "$build_only_mode" = 1 ]; then
build_args+=( "$arg" )
elif [ "$arg" = "--" ]; then
# Everything after the -- is passed only to 'nix build'
build_only_mode=1
else
args+=( "$arg" )
build_args+=( "$arg" )
fi
done
if command -v nom &> /dev/null; then
nom build "$@"
nom build "${build_args[@]}"
else
nix build "$@"
nix build "${build_args[@]}"
fi
if [ -z ${ATTIC_TOKEN+x} ]; then
@@ -24,7 +46,7 @@ just() {
"$ATTIC_TOKEN"
# Find all output paths of the installables and their build dependencies
readarray -t derivations < <(nix path-info --derivation "$@")
readarray -t derivations < <(nix path-info --derivation "${args[@]}")
cache=()
for derivation in "${derivations[@]}"; do
cache+=(
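For reference, the xtask wrapper further down calls this script with the new separator; everything after `--` (here `--no-link`) is forwarded only to `nix build`, while `nix path-info` and the cache upload still receive the installable. The command is taken from `xtask/src/complement/docker.rs`:

nix-build-and-cache just .#complement-grapevine-oci-image -- --no-link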


@@ -19,6 +19,7 @@ particular version so that attention can be drawn to the important parts:
4. Changed
5. Fixed
6. Added
7. Internal
Entries within each section should be sorted by merge order. If multiple changes
result in a single entry, choose the merge order of the first or last change.
@@ -190,3 +191,10 @@ This will be the first release of Grapevine since it was forked from Conduit
output.
* `observability.logs.timestamp`: Whether timestamps should be included in
the logs.
### Internal
1. Support for running black-box integration tests with [complement][complement].
([!47](https://gitlab.computer.surgery/matrix/grapevine-fork/-/merge_requests/47))
[complement]: https://github.com/matrix-org/complement

complement-baseline.tsv (new file, 152 lines)

@@ -0,0 +1,152 @@
test result
GLOBAL FAIL
TestACLs FAIL
TestBannedUserCannotSendJoin FAIL
TestCannotSendKnockViaSendKnockInMSC3787Room FAIL
TestCannotSendNonJoinViaSendJoinV1 FAIL
TestCannotSendNonJoinViaSendJoinV2 FAIL
TestCannotSendNonKnockViaSendKnock FAIL
TestCannotSendNonLeaveViaSendLeaveV1 FAIL
TestCannotSendNonLeaveViaSendLeaveV2 FAIL
TestClientSpacesSummary FAIL
TestClientSpacesSummary/max_depth FAIL
TestClientSpacesSummary/pagination FAIL
TestClientSpacesSummary/query_whole_graph FAIL
TestClientSpacesSummary/redact_link FAIL
TestClientSpacesSummary/suggested_only FAIL
TestClientSpacesSummaryJoinRules FAIL
TestDeviceListsUpdateOverFederation FAIL
TestDeviceListsUpdateOverFederation/good_connectivity FAIL
TestDeviceListsUpdateOverFederation/interrupted_connectivity FAIL
TestDeviceListsUpdateOverFederation/stopped_server FAIL
TestDeviceListsUpdateOverFederationOnRoomJoin FAIL
TestEventAuth FAIL
TestFederatedClientSpaces FAIL
TestFederationKeyUploadQuery FAIL
TestFederationRedactSendsWithoutEvent FAIL
TestFederationRejectInvite FAIL
TestFederationRoomsInvite FAIL
TestFederationRoomsInvite/Parallel FAIL
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation FAIL
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_for_empty_room FAIL
TestFederationRoomsInvite/Parallel/Invited_user_can_reject_invite_over_federation_several_times FAIL
TestFederationRoomsInvite/Parallel/Invited_user_has_'is_direct'_flag_in_prev_content_after_joining FAIL
TestFederationRoomsInvite/Parallel/Remote_invited_user_can_see_room_metadata FAIL
TestGetMissingEventsGapFilling FAIL
TestInboundCanReturnMissingEvents FAIL
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_invited_visibility FAIL
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_joined_visibility FAIL
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_shared_visibility FAIL
TestInboundCanReturnMissingEvents/Inbound_federation_can_return_missing_events_for_world_readable_visibility FAIL
TestInboundFederationKeys PASS
TestInboundFederationProfile FAIL
TestInboundFederationProfile/Inbound_federation_can_query_profile_data FAIL
TestInboundFederationProfile/Non-numeric_ports_in_server_names_are_rejected FAIL
TestInboundFederationRejectsEventsWithRejectedAuthEvents FAIL
TestIsDirectFlagFederation FAIL
TestIsDirectFlagLocal PASS
TestJoinFederatedRoomFailOver FAIL
TestJoinFederatedRoomFromApplicationServiceBridgeUser FAIL
TestJoinFederatedRoomFromApplicationServiceBridgeUser/join_remote_federated_room_as_application_service_user FAIL
TestJoinFederatedRoomWithUnverifiableEvents FAIL
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_missing_signatures_shouldn't_block_room_join FAIL
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_bad_signatures_shouldn't_block_room_join FAIL
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_state_with_unverifiable_auth_events_shouldn't_block_room_join FAIL
TestJoinFederatedRoomWithUnverifiableEvents//send_join_response_with_unobtainable_keys_shouldn't_block_room_join FAIL
TestJoinViaRoomIDAndServerName FAIL
TestJumpToDateEndpoint FAIL
TestJumpToDateEndpoint/parallel FAIL
TestJumpToDateEndpoint/parallel/federation FAIL
TestJumpToDateEndpoint/parallel/federation/can_paginate_after_getting_remote_event_from_timestamp_to_event_endpoint FAIL
TestJumpToDateEndpoint/parallel/federation/looking_backwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL
TestJumpToDateEndpoint/parallel/federation/looking_forwards,_should_be_able_to_find_event_that_was_sent_before_we_joined FAIL
TestJumpToDateEndpoint/parallel/federation/when_looking_backwards_before_the_room_was_created,_should_be_able_to_find_event_that_was_imported FAIL
TestJumpToDateEndpoint/parallel/should_find_event_after_given_timestmap FAIL
TestJumpToDateEndpoint/parallel/should_find_event_before_given_timestmap FAIL
TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_after_given_timestmap_when_all_message_timestamps_are_the_same FAIL
TestJumpToDateEndpoint/parallel/should_find_next_event_topologically_before_given_timestamp_when_all_message_timestamps_are_the_same FAIL
TestJumpToDateEndpoint/parallel/should_find_nothing_after_the_latest_timestmap PASS
TestJumpToDateEndpoint/parallel/should_find_nothing_before_the_earliest_timestmap PASS
TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_private_room_you_are_not_a_member_of FAIL
TestJumpToDateEndpoint/parallel/should_not_be_able_to_query_a_public_room_you_are_not_a_member_of FAIL
TestKnockRoomsInPublicRoomsDirectory FAIL
TestKnockRoomsInPublicRoomsDirectoryInMSC3787Room FAIL
TestKnocking FAIL
TestKnockingInMSC3787Room FAIL
TestLocalPngThumbnail PASS
TestMediaFilenames FAIL
TestMediaFilenames/Parallel FAIL
TestMediaFilenames/Parallel/ASCII FAIL
TestMediaFilenames/Parallel/ASCII/Can_download_file_'ascii' FAIL
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name;with;semicolons' FAIL
TestMediaFilenames/Parallel/ASCII/Can_download_file_'name_with_spaces' FAIL
TestMediaFilenames/Parallel/ASCII/Can_download_specifying_a_different_ASCII_file_name PASS
TestMediaFilenames/Parallel/ASCII/Can_upload_with_ASCII_file_name PASS
TestMediaFilenames/Parallel/Unicode FAIL
TestMediaFilenames/Parallel/Unicode/Can_download_specifying_a_different_Unicode_file_name FAIL
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_locally FAIL
TestMediaFilenames/Parallel/Unicode/Can_download_with_Unicode_file_name_over_federation FAIL
TestMediaFilenames/Parallel/Unicode/Can_upload_with_Unicode_file_name PASS
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_as_inline SKIP
TestMediaFilenames/Parallel/Unicode/Will_serve_safe_media_types_with_parameters_as_inline SKIP
TestMediaFilenames/Parallel/Unicode/Will_serve_unsafe_media_types_as_attachments SKIP
TestMediaWithoutFileName FAIL
TestMediaWithoutFileName/parallel FAIL
TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_locally PASS
TestMediaWithoutFileName/parallel/Can_download_without_a_file_name_over_federation FAIL
TestMediaWithoutFileName/parallel/Can_upload_without_a_file_name PASS
TestNetworkPartitionOrdering FAIL
TestOutboundFederationIgnoresMissingEventWithBadJSONForRoomVersion6 FAIL
TestOutboundFederationProfile FAIL
TestOutboundFederationProfile/Outbound_federation_can_query_profile_data FAIL
TestOutboundFederationSend FAIL
TestRemoteAliasRequestsUnderstandUnicode FAIL
TestRemotePngThumbnail FAIL
TestRemotePresence FAIL
TestRemoteTyping FAIL
TestRestrictedRoomsLocalJoin FAIL
TestRestrictedRoomsLocalJoin/Join_should_fail_initially PASS
TestRestrictedRoomsLocalJoin/Join_should_fail_when_left_allowed_room FAIL
TestRestrictedRoomsLocalJoin/Join_should_fail_with_mangled_join_rules PASS
TestRestrictedRoomsLocalJoin/Join_should_succeed_when_invited FAIL
TestRestrictedRoomsLocalJoin/Join_should_succeed_when_joined_to_allowed_room PASS
TestRestrictedRoomsLocalJoinInMSC3787Room FAIL
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_initially PASS
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_when_left_allowed_room FAIL
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_invited FAIL
TestRestrictedRoomsLocalJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room PASS
TestRestrictedRoomsRemoteJoin FAIL
TestRestrictedRoomsRemoteJoin/Join_should_fail_initially PASS
TestRestrictedRoomsRemoteJoin/Join_should_fail_when_left_allowed_room FAIL
TestRestrictedRoomsRemoteJoin/Join_should_fail_with_mangled_join_rules PASS
TestRestrictedRoomsRemoteJoin/Join_should_succeed_when_invited FAIL
TestRestrictedRoomsRemoteJoin/Join_should_succeed_when_joined_to_allowed_room FAIL
TestRestrictedRoomsRemoteJoinFailOver FAIL
TestRestrictedRoomsRemoteJoinFailOverInMSC3787Room FAIL
TestRestrictedRoomsRemoteJoinInMSC3787Room FAIL
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_initially PASS
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_when_left_allowed_room FAIL
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_fail_with_mangled_join_rules PASS
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_invited FAIL
TestRestrictedRoomsRemoteJoinInMSC3787Room/Join_should_succeed_when_joined_to_allowed_room FAIL
TestRestrictedRoomsRemoteJoinLocalUser FAIL
TestRestrictedRoomsRemoteJoinLocalUserInMSC3787Room FAIL
TestRestrictedRoomsSpacesSummaryFederation FAIL
TestRestrictedRoomsSpacesSummaryLocal PASS
TestSendJoinPartialStateResponse FAIL
TestSyncOmitsStateChangeOnFilteredEvents FAIL
TestToDeviceMessagesOverFederation FAIL
TestToDeviceMessagesOverFederation/good_connectivity FAIL
TestToDeviceMessagesOverFederation/interrupted_connectivity FAIL
TestToDeviceMessagesOverFederation/stopped_server FAIL
TestUnbanViaInvite FAIL
TestUnknownEndpoints FAIL
TestUnknownEndpoints/Client-server_endpoints PASS
TestUnknownEndpoints/Key_endpoints FAIL
TestUnknownEndpoints/Media_endpoints PASS
TestUnknownEndpoints/Server-server_endpoints PASS
TestUnknownEndpoints/Unknown_prefix PASS
TestUnrejectRejectedEvents FAIL
TestUserAppearsInChangedDeviceListOnJoinOverFederation FAIL
TestWriteMDirectAccountData PASS


@@ -14,6 +14,11 @@
let
# Keep sorted
mkScope = pkgs: pkgs.lib.makeScope pkgs.newScope (self: {
complement = self.callPackage ./nix/pkgs/complement {};
complement-grapevine-oci-image =
self.callPackage ./nix/pkgs/complement-grapevine-oci-image { };
craneLib =
(inputs.crane.mkLib pkgs).overrideToolchain self.toolchain;
@@ -48,6 +53,9 @@
{
packages = {
default = (mkScope pkgs).default;
complement = (mkScope pkgs).complement;
complement-grapevine-oci-image =
(mkScope pkgs).complement-grapevine-oci-image;
}
//
builtins.listToAttrs


@@ -0,0 +1,32 @@
# this config file is processed with envsubst before being loaded
server_name = "$SERVER_NAME"
allow_registration = true
[federation]
trusted_servers = []
[database]
backend = "rocksdb"
path = "/app/db"
[observability.logs]
filter = "debug,h2=warn,hyper=warn"
# ansi escapes can make it hard to read the log files in an editor
colors = false
[tls]
certs = "/app/grapevine.crt"
key = "/app/grapevine.key"
[[listen]]
type = "tcp"
address = "0.0.0.0"
port = 8008
[[listen]]
type = "tcp"
address = "0.0.0.0"
port = 8448
tls = true
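The image entrypoint in the next file runs this config through envsubst before starting grapevine, so `$SERVER_NAME` is filled in at container start. A rough local illustration of that step, where the server name `hs1` is just an example value:

SERVER_NAME=hs1 envsubst --no-unset < config.toml
# prints the config with: server_name = "hs1"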


@@ -0,0 +1,67 @@
# Keep sorted
{ buildEnv
, coreutils
, default
, dockerTools
, envsubst
, moreutils
, openssl
, writeShellScript
, writeTextDir
}:
dockerTools.buildImage {
name = "complement-grapevine";
copyToRoot = buildEnv {
name = "image-root";
paths = [
(writeTextDir "app/config.toml" (builtins.readFile ./config.toml))
coreutils
default
moreutils
envsubst
openssl
];
pathsToLink = [ "/bin" "/app" ];
};
config = {
ExposedPorts = {
"8008/tcp" = {};
"8448/tcp" = {};
};
Cmd = [
(writeShellScript "docker-entrypoint.sh" ''
set -euo pipefail
mkdir -p /tmp
# trust certs signed by the complement test CA
mkdir -p /etc/ca-certificates
cp /complement/ca/ca.crt /etc/ca-certificates/
# sign our TLS cert with the complement test CA
openssl genrsa \
-out /app/grapevine.key \
2048
openssl req -new \
-sha256 \
-key /app/grapevine.key \
-subj "/CN=$SERVER_NAME" \
-out /app/grapevine.csr
openssl x509 -req \
-in /app/grapevine.csr \
-CA /complement/ca/ca.crt \
-CAkey /complement/ca/ca.key \
-CAcreateserial \
-out /app/grapevine.crt \
-days 365 \
-sha256
envsubst --no-unset < /app/config.toml | sponge /app/config.toml
grapevine --config /app/config.toml
'')
];
};
}
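The xtask wrapper loads this image into the docker daemon from its nix store path (see `load_docker_image` in `xtask/src/complement/docker.rs` below). Done by hand, the equivalent is roughly:

docker image load --input "$(nix path-info .#complement-grapevine-oci-image)"
# prints "Loaded image: complement-grapevine:<tag>", which the wrapper parses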


@@ -0,0 +1,33 @@
# Dependencies (keep sorted)
{ buildGoModule
, fetchFromGitHub
, lib
, olm
}:
buildGoModule {
name = "complement";
src = fetchFromGitHub {
owner = "matrix-org";
repo = "complement";
rev = "8587fb3cbe746754b2c883ff6c818ca4d987d0a5";
hash = "sha256-cie+b5N/TQAFD8vF/XbqfyFJkFU0qUPDbtJQDm/TfQc=";
};
vendorHash = "sha256-GyvxXUOoXnRebfdgZgTdg34/zKvWmf0igOfblho9OTc=";
buildInputs = [ olm ];
doCheck = false;
postBuild = ''
# compiles the tests into a binary
go test -c ./tests -o "$GOPATH/bin/complement.test"
'';
meta = {
description = "Matrix compliance test suite";
homepage = "https://github.com/matrix-org/complement";
license = lib.licenses.asl20;
};
}
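This derivation pre-compiles the complement tests into a single binary, exposed through the flake as `packages.complement`. A rough manual run outside the xtask wrapper might look like the following; the image tag is hypothetical, and `COMPLEMENT_BASE_IMAGE` is complement's own environment variable for choosing the homeserver image:

nix build .#complement
COMPLEMENT_BASE_IMAGE=complement-grapevine:<tag> ./result/bin/complement.test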


@@ -86,6 +86,7 @@ let
"Cargo.lock"
"Cargo.toml"
"src"
"xtask"
];
};


@@ -1,8 +1,11 @@
# Keep sorted
{ default
, engage
, complement
, go
, inputs
, jq
, lib
, lychee
, markdownlint-cli
, mdbook
@@ -17,6 +20,13 @@ mkShell {
# sources, and it can read this environment variable to do so. The
# `rust-src` component is required in order for this to work.
RUST_SRC_PATH = "${toolchain}/lib/rustlib/src/rust/library";
# See the doc comment on `use_xtask_path` in `xtask/src/main.rs`.
GRAPEVINE_XTASK_PATH = lib.makeBinPath [
# Keep sorted
complement
go
];
};
# Development tools

xtask/Cargo.toml (new file, 21 lines)

@@ -0,0 +1,21 @@
[package]
name = "xtask"
version = "0.1.0"
edition = "2021"
license.workspace = true
rust-version.workspace = true
[dependencies]
clap.workspace = true
miette.workspace = true
process-wrap.workspace = true
indicatif.workspace = true
rand.workspace = true
serde.workspace = true
serde_json.workspace = true
signal-hook.workspace = true
strum.workspace = true
xshell.workspace = true
[lints]
workspace = true

xtask/src/complement.rs (new file, 142 lines)

@@ -0,0 +1,142 @@
use std::{
fs::{self},
path::{Path, PathBuf},
};
use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr};
use serde::Deserialize;
use xshell::{cmd, Shell};
mod docker;
mod summary;
mod test2json;
use self::{
docker::{load_docker_image, retag_docker_image},
summary::{compare_summary, read_summary},
test2json::{count_complement_tests, run_complement},
};
/// Runs complement tests, writes results to an output directory, and compares
/// results with a baseline.
///
/// The output directory structure is
///
/// - `$out/summary.tsv`: a TSV file with the pass/fail/skip result for each
/// test
///
/// - `$out/raw-log.jsonl`: raw output of the go test2json tool
///
/// - `$out/logs/...`: a text file named `$test.txt` for each test, containing
/// the test logs.
///
/// These files will be updated incrementally during the test run. When the run
/// is complete, the wrapper compares the results in `$out/summary.tsv`
/// against the baseline result. If there are any differences, it exits with an
/// error.
///
/// The expected workflow is to run this after making changes to Grapevine, to
/// look for regressions in tests that were previously passing. If you make
/// changes that fix an existing failing test, you need to make sure that they
/// did not introduce any regressions, and then copy the `summary.tsv` file from
/// your test run over the existing `complement-baseline.tsv` file in the
/// repository root. The intent is that `complement-baseline.tsv` should always
/// be in sync with the expected results from a test run.
#[derive(clap::Args)]
pub(crate) struct Args {
/// Directory to write test results
///
/// This directory will be created automatically, but it must be empty.
/// If it exists and is not empty, an error will be returned.
#[clap(short, long)]
out: PathBuf,
/// Baseline test summary file to compare with
///
/// If unspecified, defaults to `$repo_root/complement-baseline.tsv`
#[clap(short, long)]
baseline: Option<PathBuf>,
}
#[allow(clippy::needless_pass_by_value)]
pub(crate) fn main(args: Args, sh: &Shell) -> Result<()> {
let toplevel = get_toplevel_path(sh)
.wrap_err("failed to determine repository root directory")?;
let baseline_path = args
.baseline
.unwrap_or_else(|| toplevel.join("complement-baseline.tsv"));
let baseline = read_summary(&baseline_path).wrap_err_with(|| {
format!(
"failed to read baseline test result summary from \
{baseline_path:?}"
)
})?;
create_out_dir(&args.out).wrap_err_with(|| {
format!("error initializing output directory {:?}", args.out)
})?;
let docker_image = load_docker_image(sh, &toplevel).wrap_err(
"failed to build and load complement-grapevine docker image",
)?;
let docker_image = retag_docker_image(sh, &docker_image)
.wrap_err("failed to retag docker image")?;
let test_count = count_complement_tests(sh, &docker_image)
.wrap_err("failed to determine total complement test count")?;
let results = run_complement(sh, &args.out, &docker_image, test_count)
.wrap_err("failed to run complement tests")?;
let summary_path = args.out.join("summary.tsv");
compare_summary(&baseline, &results, &baseline_path, &summary_path)?;
println!("\nTest results were identical to baseline.");
Ok(())
}
/// Deserialize a single-line json string using [`serde_json::from_str`] and
/// convert the error to a miette diagnostic.
///
/// # Panics
/// Panics if `line` contains a newline.
fn from_json_line<'a, T: Deserialize<'a>>(line: &'a str) -> Result<T> {
assert!(
!line.contains('\n'),
"from_json_line requires single-line json source"
);
serde_json::from_str(line).map_err(|e| {
// Needs single-line input so that we don't have to deal with converting
// line/column to a span offset.
let offset = e.column() - 1;
let label = LabeledSpan::at_offset(offset, "error here");
miette!(labels = vec![label], "{e}").with_source_code(line.to_owned())
})
}
/// Ensures that the output directory exists and is empty
///
/// If the directory does not exist, it will be created. If it is not empty, an
/// error will be returned.
///
/// We have no protection against concurrent programs modifying the contents of
/// the directory while the complement wrapper tool is running.
fn create_out_dir(out: &Path) -> Result<()> {
fs::create_dir_all(out)
.into_diagnostic()
.wrap_err("error creating output directory")?;
let mut entries = fs::read_dir(out)
.into_diagnostic()
.wrap_err("error checking current contents of output directory")?;
if entries.next().is_some() {
return Err(miette!(
"output directory is not empty. Refusing to run, instead of \
possibly overwriting existing files."
));
}
fs::create_dir(out.join("logs"))
.into_diagnostic()
.wrap_err("error creating logs subdirectory in output directory")?;
Ok(())
}
/// Returns the path to the repository root
fn get_toplevel_path(sh: &Shell) -> Result<PathBuf> {
let path =
cmd!(sh, "git rev-parse --show-toplevel").read().into_diagnostic()?;
Ok(path.into())
}


@@ -0,0 +1,133 @@
//! Functions for working with docker images and containers.
use std::path::Path;
use miette::{miette, IntoDiagnostic, LabeledSpan, Result, WrapErr};
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use serde::Deserialize;
use xshell::{cmd, Shell};
use super::from_json_line;
/// Build the 'complement-grapevine' OCI image and load it into the docker
/// daemon.
pub(crate) fn load_docker_image(sh: &Shell, toplevel: &Path) -> Result<String> {
// > i would Not trust that parser as far as i can throw it
// - @jade_:matrix.org, 2024-06-19
//
// So we're not even gonna try to escape the arbitrary top level path
// correctly for a flake installable reference. Instead we're just gonna cd
// into toplevel before running nix commands.
let _pushd_guard = sh.push_dir(toplevel);
let installable = ".#complement-grapevine-oci-image";
cmd!(sh, "nix-build-and-cache just {installable} -- --no-link")
.run()
.into_diagnostic()
.wrap_err("error building complement-grapevine-oci-image")?;
let oci_image_path = cmd!(sh, "nix path-info {installable}")
.read()
.into_diagnostic()
.wrap_err(
"error getting nix store path for complement-grapevine-oci-image",
)?;
// Instead of building the image with a fixed tag, we let nix choose the tag
// based on the input hash, and then determine the image/tag it used by
// parsing the 'docker load' output. This is to avoid a race condition
// between multiple concurrent 'xtask complement' invocations, which might
// otherwise assign the same tag to different images.
let load_output = cmd!(sh, "docker image load --input {oci_image_path}")
.read()
.into_diagnostic()
.wrap_err("error loading complement-grapevine docker image")?;
let expected_prefix = "Loaded image: ";
let docker_image = load_output
.strip_prefix(expected_prefix)
.ok_or_else(|| {
// Miette doesn't support inclusive ranges.
// <https://github.com/zkat/miette/pull/385>
#[allow(clippy::range_plus_one)]
let span = 0..(expected_prefix.len().min(load_output.len()) + 1);
let label =
LabeledSpan::at(span, format!("Expected {expected_prefix:?}"));
miette!(
labels = vec![label],
"failed to parse 'docker image load' output"
)
.with_source_code(load_output.clone())
})?
.to_owned();
Ok(docker_image)
}
/// Retags the docker image with a random tag. Returns the new image reference.
///
/// This is useful so that we can uniquely identify the set of docker containers
/// spawned by a complement run. Without using a unique tag, there is no way to
/// determine which docker containers to kill if a run is cancelled, since other
/// concurrent complement runs may have created containers with the same image.
pub(crate) fn retag_docker_image(sh: &Shell, image: &str) -> Result<String> {
let mut rng = thread_rng();
let new_tag: String =
(0..16).map(|_| char::from(rng.sample(Alphanumeric))).collect();
let (repo, _old_tag) = image.split_once(':').ok_or_else(|| {
miette!(
"Docker image reference was not in the expected format. Expected \
\"{{repository}}:{{tag}}\", got {image:?}"
)
})?;
let new_image = format!("{repo}:{new_tag}");
cmd!(sh, "docker image tag {image} {new_image}").run().into_diagnostic()?;
Ok(new_image)
}
/// Kills all docker containers using a particular image.
///
/// This can be used to clean up dangling docker containers after a cancelled
/// complement run, but it's important that the image reference be unique. See
/// the [`retag_docker_image`] function for a discussion of this.
pub(crate) fn kill_docker_containers(sh: &Shell, image: &str) -> Result<()> {
#[derive(Deserialize)]
struct ContainerInfo {
#[serde(rename = "ID")]
id: String,
#[serde(rename = "Image")]
image: String,
}
// --filter ancestor={image} doesn't work here, because containers are matched
// by image id, even if their image reference (repo:tag) is different. We need
// to list all the containers and filter them ourselves.
let containers = cmd!(sh, "docker container ls --format json")
.read()
.into_diagnostic()
.wrap_err("error listing running docker containers")?;
let containers = containers
.lines()
.map(from_json_line)
.collect::<Result<Vec<ContainerInfo>, _>>()
.wrap_err(
"error parsing docker container info from 'docker container ls' \
output",
)?;
let our_containers = containers
.into_iter()
.filter(|container| container.image == image)
.map(|container| container.id)
.collect::<Vec<_>>();
if !our_containers.is_empty() {
// Ignore non-zero exit status because 'docker kill' will fail if
// containers already exited before sending the signal, which is
// fine.
cmd!(sh, "docker kill --signal=SIGKILL {our_containers...}")
.ignore_status()
.run()
.into_diagnostic()
.wrap_err("error killing docker containers")?;
}
Ok(())
}
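If a run is cancelled outside the wrapper, a rough manual equivalent of `kill_docker_containers` would be the pipeline below, using jq (which the devshell provides) and a hypothetical image tag:

docker container ls --format json \
  | jq -r 'select(.Image == "complement-grapevine:<tag>") | .ID' \
  | xargs -r docker kill --signal=SIGKILL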


@@ -0,0 +1,286 @@
//! Functions for working with the `summary.tsv` files emitted by the complement
//! wrapper.
//!
//! This file is a TSV containing test names and results for each test in a
//! complement run.
use std::{
collections::BTreeMap,
fs,
io::{BufWriter, Write},
path::Path,
};
use miette::{
miette, IntoDiagnostic, LabeledSpan, NamedSource, Result, WrapErr,
};
use super::test2json::TestResult;
pub(crate) type TestResults = BTreeMap<String, TestResult>;
/// Escape a string value for use in a TSV file.
///
/// According to the [tsv spec][1], the only characters that need to be escaped
/// are `\n`, `\t`, `\r`, and `\`.
///
/// [1]: https://www.loc.gov/preservation/digital/formats/fdd/fdd000533.shtml
fn escape_tsv_value(value: &str) -> String {
value
.replace('\\', "\\\\")
.replace('\n', "\\n")
.replace('\t', "\\t")
.replace('\r', "\\r")
}
/// Converts an escaped TSV value back to its unescaped form.
fn unescape_tsv_value(value: &str) -> String {
let mut chars = value.chars();
let mut out = String::new();
while let Some(c) = chars.next() {
if c == '\\' {
match chars.next() {
Some('\\') => out.push('\\'),
Some('n') => out.push('\n'),
Some('t') => out.push('\t'),
Some('r') => out.push('\r'),
Some(c2) => {
out.push(c);
out.push(c2);
}
None => out.push(c),
}
} else {
out.push(c);
}
}
out
}
/// Write a test result summary to a writer.
pub(crate) fn write_summary<W: Write>(
w: &mut BufWriter<W>,
summary: &TestResults,
) -> Result<()> {
// Write header line
writeln!(w, "test\tresult").into_diagnostic()?;
// Write rows
for (test, result) in summary {
writeln!(
w,
"{}\t{}",
escape_tsv_value(test),
escape_tsv_value(&result.to_string())
)
.into_diagnostic()?;
}
Ok(())
}
/// Reads test result summary from a TSV file written by a previous run of the
/// complement wrapper.
pub(crate) fn read_summary(
path: &Path,
) -> Result<BTreeMap<String, TestResult>> {
let contents = fs::read_to_string(path)
.into_diagnostic()
.wrap_err("failed to read summary file contents")?;
let source = NamedSource::new(path.to_string_lossy(), contents);
let contents = &source.inner();
let mut offset = 0;
// The TSV spec allows CRLF, but we never emit these ourselves
let mut lines = contents.split('\n');
let header_line = lines.next().ok_or_else(|| {
miette!(
labels = vec![LabeledSpan::at_offset(0, "expected header row")],
"summary file missing header row",
)
.with_source_code(source.clone())
})?;
let expected_header_line = "test\tresult";
if header_line != expected_header_line {
return Err(miette!(
labels = vec![LabeledSpan::at(
0..header_line.len(),
"unexpected header"
)],
"summary file header row has unexpected columns. Expecting \
{expected_header_line:?}."
)
.with_source_code(source));
}
offset += header_line.len() + 1;
let mut results = BTreeMap::new();
for line in lines {
if line.is_empty() {
continue;
}
let tabs = line.match_indices('\t').collect::<Vec<_>>();
let column_count = tabs.len() + 1;
let (result_span, test, result) = match tabs[..] {
[(first_tab, _)] => {
let result_span = offset + first_tab + 1..offset + line.len();
let test = line.get(..first_tab).expect(
"index should be valid because it was returned from \
'match_indices'",
);
let result = line.get(first_tab + 1..).expect(
"index should be valid because it was returned from \
'match_indices'",
);
(result_span, test, result)
}
[] => {
return Err(miette!(
labels = vec![LabeledSpan::at_offset(
offset + line.len(),
"expected more columns here"
)],
"each row in the summary file should have exactly two \
columns. This row only has {column_count} columns.",
)
.with_source_code(source))
}
[_, (first_bad_tab, _), ..] => {
let span = offset + first_bad_tab..offset + line.len();
return Err(miette!(
labels =
vec![LabeledSpan::at(span, "unexpected extra columns")],
"each row in the summary file should have exactly two \
columns. This row has {column_count} columns.",
)
.with_source_code(source));
}
};
let test = unescape_tsv_value(test);
let result = unescape_tsv_value(result);
let result = result.parse().map_err(|_| {
miette!(
labels =
vec![LabeledSpan::at(result_span, "invalid result value")],
"test result value must be one of 'PASS', 'FAIL', or 'SKIP'."
)
.with_source_code(source.clone())
})?;
results.insert(test, result);
offset += line.len() + 1;
}
Ok(results)
}
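// Illustrative sketch (not part of the original change): `read_summary`
// accepts exactly the format produced by `write_summary`. The temp-file name
// below is a hypothetical path used only for this example.
#[cfg(test)]
mod read_summary_example {
    use std::{env, fs};

    use super::{read_summary, TestResult};

    #[test]
    fn reads_back_a_written_summary() {
        let path = env::temp_dir().join("grapevine-xtask-summary-example.tsv");
        fs::write(&path, "test\tresult\nTestBar\tFAIL\nTestFoo\tPASS\n")
            .unwrap();
        let results = read_summary(&path).unwrap();
        assert_eq!(results.get("TestFoo"), Some(&TestResult::Pass));
        assert_eq!(results.get("TestBar"), Some(&TestResult::Fail));
        fs::remove_file(&path).ok();
    }
}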
/// Print a bulleted list of test names, truncating if there are too many.
fn print_truncated_tests(tests: &[&str]) {
let max = 5;
for test in &tests[..max.min(tests.len())] {
println!(" - {test}");
}
if tests.len() > max {
println!(" ... ({} more)", tests.len() - max);
}
}
/// Compares new test results against older results, returning an error if
/// they differ.
///
/// A description of the differences will be logged separately from the returned
/// error.
pub(crate) fn compare_summary(
old: &TestResults,
new: &TestResults,
old_path: &Path,
new_path: &Path,
) -> Result<()> {
let mut unexpected_pass: Vec<&str> = Vec::new();
let mut unexpected_fail: Vec<&str> = Vec::new();
let mut unexpected_skip: Vec<&str> = Vec::new();
let mut added: Vec<&str> = Vec::new();
let mut removed: Vec<&str> = Vec::new();
for (test, new_result) in new {
if let Some(old_result) = old.get(test) {
if old_result != new_result {
match new_result {
TestResult::Pass => unexpected_pass.push(test),
TestResult::Fail => unexpected_fail.push(test),
TestResult::Skip => unexpected_skip.push(test),
}
}
} else {
added.push(test);
}
}
for test in old.keys() {
if !new.contains_key(test) {
removed.push(test);
}
}
let mut differences = false;
if !added.is_empty() {
differences = true;
println!(
"\n{} tests were added that were not present in the baseline:",
added.len()
);
print_truncated_tests(&added);
}
if !removed.is_empty() {
differences = true;
println!(
"\n{} tests present in the baseline were removed:",
removed.len()
);
print_truncated_tests(&removed);
}
if !unexpected_pass.is_empty() {
differences = true;
println!(
"\n{} tests passed that did not pass in the baseline:",
unexpected_pass.len()
);
print_truncated_tests(&unexpected_pass);
}
if !unexpected_skip.is_empty() {
differences = true;
println!(
"\n{} tests skipped that were not skipped in the baseline:",
unexpected_skip.len()
);
print_truncated_tests(&unexpected_skip);
}
if !unexpected_fail.is_empty() {
differences = true;
println!(
"\n{} tests failed that did not fail in the baseline (these are \
likely regressions):",
unexpected_fail.len()
);
print_truncated_tests(&unexpected_fail);
}
if differences {
Err(miette!(
help = format!(
"Evaluate each of the differences to determine whether they \
are expected. If all differences are expected, copy the new \
summary file {new_path:?} to {old_path:?} and commit the \
change. If some differences are unexpected, fix them and try \
another test run.\n\nAn example of an expected change would \
be a test that is now passing after your changes fixed it. \
An example of an unexpected change would be an unrelated \
test that is now failing, which would be a regression."
),
"Test results differed from baseline in {old_path:?}. The \
differences are described above."
))
} else {
Ok(())
}
}
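// Illustrative sketch (not part of the original change): `compare_summary`
// returns `Ok` only when the two result sets are identical. The path
// arguments are used purely for the error and help messages, so placeholder
// paths are fine here.
#[cfg(test)]
mod compare_summary_example {
    use std::{collections::BTreeMap, path::Path};

    use super::{compare_summary, TestResult, TestResults};

    #[test]
    fn regressions_are_reported_as_errors() {
        let mut baseline: TestResults = BTreeMap::new();
        baseline.insert("TestFoo".to_owned(), TestResult::Pass);

        let old_path = Path::new("baseline-summary.tsv");
        let new_path = Path::new("new-summary.tsv");

        let same = baseline.clone();
        assert!(compare_summary(&baseline, &same, old_path, new_path).is_ok());

        let mut regressed = baseline.clone();
        regressed.insert("TestFoo".to_owned(), TestResult::Fail);
        assert!(
            compare_summary(&baseline, &regressed, old_path, new_path)
                .is_err()
        );
    }
}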


@ -0,0 +1,514 @@
//! Functions for working with the go [`test2json`][test2json] tool.
//!
//! [test2json]: https://pkg.go.dev/cmd/test2json@go1.22.4
use std::{
collections::BTreeMap,
fs::{self, File},
io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write},
mem, panic,
path::{Path, PathBuf},
process::{Command, Stdio},
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
thread,
time::Duration,
};
use indicatif::{ProgressBar, ProgressStyle};
use miette::{miette, IntoDiagnostic, Result, WrapErr};
use process_wrap::std::{ProcessGroup, StdChildWrapper, StdCommandWrap};
use serde::Deserialize;
use signal_hook::{
consts::signal::{SIGINT, SIGQUIT, SIGTERM},
flag,
iterator::Signals,
};
use strum::{Display, EnumString};
use xshell::{cmd, Shell};
use super::{
docker::kill_docker_containers,
from_json_line,
summary::{write_summary, TestResults},
};
/// Returns the total number of complement tests that will be run.
///
/// This is only able to count toplevel tests, and will not include subtests
/// (`A/B`).
pub(crate) fn count_complement_tests(
sh: &Shell,
docker_image: &str,
) -> Result<u64> {
let test_list = cmd!(sh, "go tool test2json complement.test -test.list .*")
.env("COMPLEMENT_BASE_IMAGE", docker_image)
.read()
.into_diagnostic()?;
let test_count = u64::try_from(test_list.lines().count())
.into_diagnostic()
.wrap_err("test count overflowed u64")?;
Ok(test_count)
}
/// Run complement tests.
///
/// This function mostly deals with handling shutdown signals, while the actual
/// logic for running complement is in `run_complement_inner`, which is spawned
/// as a separate thread. This is necessary because the go `test2json` tool
/// ignores SIGTERM and SIGINT. Without signal handling on our end, terminating
/// the complement wrapper process would leave a dangling complement child
/// process running.
///
/// The reason that `test2json` does this is that it does not implement any kind
/// of test cleanup, and so the developers decided that ignoring termination
/// signals entirely was safer. Running go unit tests outside of `test2json`
/// (and so without machine-readable output) does not have this limitation.
/// Unfortunately, neither of these is an option for us. We need
/// machine-readable output to compare against the baseline result. Complement
/// runs can take 40+ minutes, so being able to cancel them is a requirement.
///
/// Because we don't trigger any of the normal cleanup, we need to handle
/// dangling docker containers ourselves.
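///
/// In isolation, the two-stage shutdown pattern used below looks roughly like
/// this (an illustrative sketch, not a doctest; the real handling covers
/// SIGTERM, SIGINT, and SIGQUIT):
///
/// ```ignore
/// let term_now = Arc::new(AtomicBool::new(false));
/// // While `term_now` is false this is inert; once it is true, the next
/// // SIGINT terminates the process with exit status 1.
/// flag::register_conditional_shutdown(SIGINT, 1, Arc::clone(&term_now))?;
/// let mut signals = Signals::new([SIGINT])?;
/// if signals.forever().next().is_some() {
///     term_now.store(true, Ordering::Relaxed);
///     // graceful cleanup: kill the child process and dangling containers
/// }
/// ```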
pub(crate) fn run_complement(
sh: &Shell,
out: &Path,
docker_image: &str,
test_count: u64,
) -> Result<TestResults> {
let term_signals = [SIGTERM, SIGINT, SIGQUIT];
let term_now = Arc::new(AtomicBool::new(false));
for sig in &term_signals {
// Terminate immediately if `term_now` is true and we receive a
// terminating signal
flag::register_conditional_shutdown(*sig, 1, Arc::clone(&term_now))
.into_diagnostic()
.wrap_err("error registering signal handler")?;
}
let mut signals = Signals::new(term_signals).unwrap();
let state = Mutex::new(ComplementRunnerState::Startup);
let signals_handle = signals.handle();
let result = thread::scope(|s| {
let state_ref = &state;
let cloned_sh = sh.clone();
let thread_handle = s.spawn(move || {
let panic_result = panic::catch_unwind(|| {
run_complement_inner(
&cloned_sh,
out,
docker_image,
test_count,
state_ref,
)
});
// Stop the signal-handling loop, even if we panicked
signals_handle.close();
match panic_result {
Ok(result) => result,
Err(panic) => panic::resume_unwind(panic),
}
});
let canceled = if let Some(signal) = signals.forever().next() {
let description = match signal {
SIGTERM => "SIGTERM",
SIGINT => "ctrl+c",
SIGQUIT => "SIGQUIT",
_ => unreachable!(),
};
eprintln!(
"Received {description}, stopping complement run. Send \
{description} a second time to terminate without cleaning \
up, which may leave dangling processes and docker containers"
);
term_now.store(true, Ordering::Relaxed);
{
let mut state = state.lock().unwrap();
let old_state =
mem::replace(&mut *state, ComplementRunnerState::Shutdown);
match old_state {
ComplementRunnerState::Startup => (),
ComplementRunnerState::Shutdown => unreachable!(),
ComplementRunnerState::Running(mut child) => {
// Killing the child process should terminate the complement
// runner thread in a bounded amount of time, because it will
// cause the stdout reader to return EOF.
child.kill().unwrap();
}
}
}
kill_docker_containers(sh, docker_image).wrap_err(
"failed to kill dangling complement docker containers",
)?;
true
} else {
// We hit this branch if the signal handle is closed by the complement
// runner thread, which means the complement run finished without
// being canceled.
false
};
match thread_handle.join() {
Ok(result) => {
if canceled {
Err(miette!("complement run was canceled"))
} else {
result
}
}
Err(panic_value) => panic::resume_unwind(panic_value),
}
});
// From this point on, terminate immediately when signalled
term_now.store(true, Ordering::Relaxed);
result
}
/// Possible states for the complement runner thread.
///
/// The current state should be protected by a mutex, where state changes are
/// only performed while the mutex is locked. This is to prevent a race
/// condition where the main thread handles a shutdown signal at the same time
/// that the complement runner thread is starting the child process, and so the
/// main thread fails to kill the child process.
///
/// Valid state transitions:
///
/// - `Startup` -> `Running`
/// - `Startup` -> `Shutdown`
/// - `Running` -> `Shutdown`
#[derive(Debug)]
enum ComplementRunnerState {
/// The complement child process has not been started yet
Startup,
/// The complement child process is running, and we have not yet received
/// a shutdown signal.
Running(Box<dyn StdChildWrapper>),
/// We have received a shutdown signal.
Shutdown,
}
/// Spawn the complement child process and handle its output.
///
/// This is the "complement runner" thread, spawned by the [`run_complement`]
/// function.
fn run_complement_inner(
sh: &Shell,
out: &Path,
docker_image: &str,
test_count: u64,
state: &Mutex<ComplementRunnerState>,
) -> Result<TestResults> {
let cmd = cmd!(sh, "go tool test2json complement.test -test.v=test2json")
.env("COMPLEMENT_BASE_IMAGE", docker_image)
.env("COMPLEMENT_SPAWN_HS_TIMEOUT", "5")
.env("COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS", "1");
eprintln!("$ {cmd}");
let stdout = {
let mut state = state.lock().unwrap();
match &*state {
ComplementRunnerState::Startup => (),
ComplementRunnerState::Running(_) => unreachable!(),
ComplementRunnerState::Shutdown => {
return Err(miette!("complement run was canceled"))
}
}
let mut cmd = Command::from(cmd);
cmd.stdout(Stdio::piped());
let mut child = StdCommandWrap::from(cmd)
.wrap(ProcessGroup::leader())
.spawn()
.into_diagnostic()
.wrap_err("error spawning complement process")?;
let stdout = child.stdout().take().expect(
"child process spawned with piped stdout should have stdout",
);
*state = ComplementRunnerState::Running(child);
stdout
};
let lines = BufReader::new(stdout).lines();
let mut ctx = TestContext::new(out, test_count)?;
for line in lines {
let line = line
.into_diagnostic()
.wrap_err("error reading output from complement process")?;
ctx.handle_line(&line)?;
}
ctx.finish()
}
/// Schema from <https://pkg.go.dev/cmd/test2json#hdr-Output_Format>
///
/// Only the fields that we need are included here.
#[derive(Deserialize)]
#[serde(
rename_all = "snake_case",
rename_all_fields = "PascalCase",
tag = "Action"
)]
enum GoTestEvent {
Run {
test: Option<String>,
},
Pass {
test: Option<String>,
},
Fail {
test: Option<String>,
},
Skip {
test: Option<String>,
},
Output {
test: Option<String>,
output: String,
},
#[serde(other)]
OtherAction,
}
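// Illustrative sketch (not part of the original change): how a single
// test2json output line maps onto `GoTestEvent`. This assumes `serde_json`
// is available to the xtask package (presumably what `from_json_line`
// wraps); the event values below are made up.
#[cfg(test)]
mod go_test_event_example {
    use super::GoTestEvent;

    #[test]
    fn parses_a_pass_event() {
        let line = r#"{"Time":"2024-08-04T00:00:00Z","Action":"pass","Package":"complement","Test":"TestFoo","Elapsed":1.5}"#;
        let event: GoTestEvent =
            serde_json::from_str(line).expect("event should deserialize");
        // Unknown fields like "Time" and "Elapsed" are ignored; the "Action"
        // tag selects the variant and "Test" fills the `test` field.
        assert!(matches!(
            event,
            GoTestEvent::Pass { test: Some(t) } if t == "TestFoo"
        ));
    }
}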
#[derive(Copy, Clone, Display, EnumString, Eq, PartialEq, Debug)]
#[strum(serialize_all = "UPPERCASE")]
pub(crate) enum TestResult {
Pass,
Fail,
Skip,
}
struct TestContext {
pb: ProgressBar,
pass_count: u64,
fail_count: u64,
skip_count: u64,
// We do not need a specific method to flush this before dropping
// `TestContext`, because the file is only written from the
// `update_summary_file` method. This method always calls flush on
// a non-error path, and the file is left in an inconsistent state on an
// error anyway.
summary_file: BufWriter<File>,
raw_log_file: BufWriter<File>,
log_dir: PathBuf,
results: TestResults,
}
/// Returns a string to use for displaying a test name
///
/// From the test2json docs:
///
/// > The Test field, if present, specifies the test, example, or benchmark
/// > function that caused the event. Events for the overall package test do not
/// > set Test.
///
/// For events that do not have a `Test` field, we display their test name as
/// `"GLOBAL"` instead.
fn test_str(test: &Option<String>) -> &str {
if let Some(test) = test {
test
} else {
"GLOBAL"
}
}
/// Returns whether a test name is a toplevel test (as opposed to a subtest)
fn test_is_toplevel(test: &str) -> bool {
!test.contains('/')
}
impl TestContext {
fn new(out: &Path, test_count: u64) -> Result<TestContext> {
// TODO: figure out how to display ETA without it fluctuating wildly.
let style = ProgressStyle::with_template(
"({msg}) {pos}/{len} [{elapsed}] {wide_bar}",
)
.expect("static progress bar template should be valid")
.progress_chars("##-");
let pb = ProgressBar::new(test_count).with_style(style);
pb.enable_steady_tick(Duration::from_secs(1));
let summary_file = File::create(out.join("summary.tsv"))
.into_diagnostic()
.wrap_err("failed to create summary file in output dir")?;
let summary_file = BufWriter::new(summary_file);
let raw_log_file = File::create(out.join("raw-log.jsonl"))
.into_diagnostic()
.wrap_err("failed to create raw log file in output dir")?;
let raw_log_file = BufWriter::new(raw_log_file);
let log_dir = out.join("logs");
let ctx = TestContext {
pb,
pass_count: 0,
fail_count: 0,
skip_count: 0,
log_dir,
summary_file,
raw_log_file,
results: BTreeMap::new(),
};
ctx.update_progress();
Ok(ctx)
}
fn finish(mut self) -> Result<TestResults> {
self.raw_log_file
.flush()
.into_diagnostic()
.wrap_err("error flushing writes to raw log file")?;
Ok(self.results)
}
fn write_raw_log_line(&mut self, line: &str) -> Result<()> {
self.raw_log_file
.write_all(line.as_bytes())
.into_diagnostic()
.wrap_err("error writing line to raw log file")?;
self.raw_log_file
.write_all(&[b'\n'])
.into_diagnostic()
.wrap_err("error writing line to raw log file")?;
Ok(())
}
fn update_progress(&self) {
self.pb
.set_position(self.pass_count + self.fail_count + self.skip_count);
self.pb.set_message(format!(
"PASS {}, FAIL {}, SKIP {}",
self.pass_count, self.fail_count, self.skip_count
));
}
fn update_summary_file(&mut self) -> Result<()> {
// Truncate the file to clear existing contents
self.summary_file
.get_mut()
.seek(SeekFrom::Start(0))
.into_diagnostic()?;
self.summary_file.get_mut().set_len(0).into_diagnostic()?;
write_summary(&mut self.summary_file, &self.results)?;
self.summary_file.flush().into_diagnostic()?;
Ok(())
}
fn handle_test_result(
&mut self,
test: &str,
result: TestResult,
) -> Result<()> {
self.pb.println(format!("=== {result}\t{test}"));
self.results.insert(test.to_owned(), result);
// 'complement.test -test.list' is only able to count toplevel tests
// ahead-of-time, so we don't include subtests in the pass/fail/skip
// counts.
if test_is_toplevel(test) {
match result {
TestResult::Pass => self.pass_count += 1,
TestResult::Fail => self.fail_count += 1,
TestResult::Skip => self.skip_count += 1,
}
self.update_progress();
}
self.update_summary_file().wrap_err("error writing summary file")?;
Ok(())
}
fn handle_test_output(&mut self, test: &str, output: &str) -> Result<()> {
let path = self.log_dir.join(test).with_extension("txt");
// Some tests have a '/' in their name, so create the extra dirs if they
// don't already exist.
let parent_dir = path.parent().expect(
"log file path should have parent. At worst, the toplevel dir is \
$out/logs/.",
);
fs::create_dir_all(parent_dir).into_diagnostic().wrap_err_with(
|| {
format!(
"error creating directory at {parent_dir:?} for log file \
{path:?}"
)
},
)?;
let mut log_file = File::options()
.create(true)
.append(true)
.open(&path)
.into_diagnostic()
.wrap_err_with(|| format!("error creating log file at {path:?}"))?;
log_file.write_all(output.as_bytes()).into_diagnostic().wrap_err_with(
|| format!("error writing to log file at {path:?}"),
)?;
Ok(())
}
fn handle_event(&mut self, event: GoTestEvent) -> Result<()> {
match event {
GoTestEvent::OtherAction => (),
GoTestEvent::Run {
test,
} => {
self.pb.println(format!("=== RUN \t{}", test_str(&test)));
}
GoTestEvent::Pass {
test,
} => {
self.handle_test_result(test_str(&test), TestResult::Pass)?;
}
GoTestEvent::Fail {
test,
} => {
self.handle_test_result(test_str(&test), TestResult::Fail)?;
}
GoTestEvent::Skip {
test,
} => {
self.handle_test_result(test_str(&test), TestResult::Skip)?;
}
GoTestEvent::Output {
test,
output,
} => {
let test = test_str(&test);
self.handle_test_output(test, &output).wrap_err_with(|| {
format!(
"failed to write test output to a log file for test \
{test:?}"
)
})?;
}
}
Ok(())
}
/// Processes a line of output from `test2json`
fn handle_line(&mut self, line: &str) -> Result<()> {
self.write_raw_log_line(line)?;
let result = from_json_line(line).wrap_err(
"failed to parse go test2json event from complement tests. \
Ignoring this event",
);
match result {
Ok(event) => self.handle_event(event)?,
Err(e) => eprintln!("{e:?}"),
}
Ok(())
}
}

xtask/src/main.rs

@ -0,0 +1,74 @@
mod complement;
use std::{env, ffi::OsString, process::ExitCode};
use clap::{Parser, Subcommand};
use miette::{miette, IntoDiagnostic, Result, WrapErr};
use xshell::Shell;
#[derive(Parser)]
struct Args {
#[clap(subcommand)]
command: Command,
}
#[derive(Subcommand)]
enum Command {
Complement(complement::Args),
}
fn main() -> ExitCode {
let Err(e) = try_main() else {
return ExitCode::SUCCESS;
};
// Include a leading newline because sometimes an error will occur in
// the middle of displaying a progress indicator.
eprintln!("\n{e:?}");
ExitCode::FAILURE
}
fn try_main() -> Result<()> {
let args = Args::parse();
let sh = new_shell()?;
match args.command {
Command::Complement(args) => complement::main(args, &sh),
}
}
fn new_shell() -> Result<Shell> {
let path = get_shell_path()?;
let sh = Shell::new()
.into_diagnostic()
.wrap_err("failed to initialize internal xshell::Shell wrapper")?;
sh.set_var("PATH", path);
Ok(sh)
}
/// Returns the value to set the `PATH` environment variable to in
/// [`xshell::Shell`] instances.
///
/// This function appends the paths from the `GRAPEVINE_XTASK_PATH` environment
/// variable to the existing value of `PATH` set in the xtask process.
///
/// Executable dependencies that are only called by commands in xtask should be
/// added to `GRAPEVINE_XTASK_PATH` instead of `PATH` in the devshell, to avoid
/// polluting the devshell path with extra entries.
fn get_shell_path() -> Result<OsString> {
let xtask_path = env::var_os("GRAPEVINE_XTASK_PATH").ok_or(miette!(
help = "This tool must be run from inside the Grapevine devshell. \
Make sure you didn't interrupt direnv or something similar.",
"GRAPEVINE_XTASK_PATH environment variable is unset"
))?;
if let Some(path) = env::var_os("PATH") {
let old_paths = env::split_paths(&path);
let xtask_paths = env::split_paths(&xtask_path);
env::join_paths(old_paths.chain(xtask_paths))
.into_diagnostic()
.wrap_err(
"error constructing new PATH value to include the paths from \
GRAPEVINE_XTASK_PATH",
)
} else {
Ok(xtask_path)
}
}
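// Illustrative sketch (not part of the original change): the PATH handling
// above simply appends the GRAPEVINE_XTASK_PATH entries to the existing PATH.
// The directories and the Unix-style ':' separator here are assumptions made
// for the example.
#[cfg(test)]
mod shell_path_example {
    use std::{env, ffi::OsString};

    #[test]
    fn xtask_paths_are_appended() {
        let devshell_path = OsString::from("/usr/bin:/bin");
        let xtask_path = OsString::from("/nix/store/example-go/bin");
        let joined = env::join_paths(
            env::split_paths(&devshell_path)
                .chain(env::split_paths(&xtask_path)),
        )
        .expect("paths should not contain separator characters");
        assert_eq!(
            joined,
            OsString::from("/usr/bin:/bin:/nix/store/example-go/bin")
        );
    }
}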