7 Commits

56 changed files with 3040 additions and 2734 deletions

View File

@@ -1,3 +0,0 @@
{
"git.enabled": false
}

View File

@@ -1,24 +0,0 @@
when:
- event: pull_request
path:
include: ['.woodpecker/server-*.yaml', 'server/**']
- event: push
branch: main
path:
include: ['.woodpecker/server-*.yaml', 'server/**']
steps:
- name: test
image: jdxcode/mise:latest
directory: server
environment:
CARGO_TERM_COLOR: always
CARGO_TARGET_DIR: /usr/local/cargo/target
CARGO_HOME: /usr/local/cargo/registry
volumes:
- cargo-target:/usr/local/cargo/target
- cargo-registry:/usr/local/cargo/registry
commands:
- apt-get update && apt-get install -y pkg-config
- mise install rust
- mise exec rust -- cargo clippy --all-targets --all-features -- -D warnings

View File

@@ -10,10 +10,6 @@ backend = "cargo:cargo-features"
version = "0.11.1" version = "0.11.1"
backend = "cargo:cargo-features-manager" backend = "cargo:cargo-features-manager"
[[tools."cargo:cargo-insta"]]
version = "1.46.3"
backend = "cargo:cargo-insta"
[[tools."cargo:cargo-nextest"]] [[tools."cargo:cargo-nextest"]]
version = "0.9.126" version = "0.9.126"
backend = "cargo:cargo-nextest" backend = "cargo:cargo-nextest"

View File

@@ -2,10 +2,10 @@
"cargo:diesel_cli" = { version = "2.3.6", features = "sqlite,sqlite-bundled", default-features = false } "cargo:diesel_cli" = { version = "2.3.6", features = "sqlite,sqlite-bundled", default-features = false }
"cargo:cargo-audit" = "0.22.1" "cargo:cargo-audit" = "0.22.1"
"cargo:cargo-vet" = "0.10.2" "cargo:cargo-vet" = "0.10.2"
flutter = "3.38.9-stable" flutter = "3.38.9-stable"
protoc = "29.6" protoc = "29.6"
"rust" = {version = "1.93.0", components = "clippy"} rust = "1.93.1"
"cargo:cargo-features-manager" = "0.11.1" "cargo:cargo-features-manager" = "0.11.1"
"cargo:cargo-nextest" = "0.9.126" "cargo:cargo-nextest" = "0.9.126"
"cargo:cargo-shear" = "latest" "cargo:cargo-shear" = "latest"
"cargo:cargo-insta" = "1.46.3"

View File

@@ -3,14 +3,111 @@ syntax = "proto3";
package arbiter; package arbiter;
import "auth.proto"; import "auth.proto";
import "client.proto";
import "user_agent.proto"; message ClientRequest {
oneof payload {
arbiter.auth.ClientMessage auth_message = 1;
CertRotationAck cert_rotation_ack = 2;
}
}
message ClientResponse {
oneof payload {
arbiter.auth.ServerMessage auth_message = 1;
CertRotationNotification cert_rotation_notification = 2;
}
}
message UserAgentRequest {
oneof payload {
arbiter.auth.ClientMessage auth_message = 1;
CertRotationAck cert_rotation_ack = 2;
UnsealRequest unseal_request = 3;
}
}
message UserAgentResponse {
oneof payload {
arbiter.auth.ServerMessage auth_message = 1;
CertRotationNotification cert_rotation_notification = 2;
UnsealResponse unseal_response = 3;
}
}
message ServerInfo { message ServerInfo {
string version = 1; string version = 1;
bytes cert_public_key = 2; bytes cert_public_key = 2;
} }
// TLS Certificate Rotation Protocol
message CertRotationNotification {
// New public certificate (DER-encoded)
bytes new_cert = 1;
// Unix timestamp when rotation will be executed (if all ACKs received)
int64 rotation_scheduled_at = 2;
// Unix timestamp deadline for ACK (7 days from now)
int64 ack_deadline = 3;
// Rotation ID for tracking
int32 rotation_id = 4;
}
message CertRotationAck {
// Rotation ID (from CertRotationNotification)
int32 rotation_id = 1;
// Client public key for identification
bytes client_public_key = 2;
// Confirmation that client saved the new certificate
bool cert_saved = 3;
}
// Vault Unseal Protocol (X25519 ECDH + ChaCha20Poly1305)
message UnsealRequest {
oneof payload {
EphemeralKeyRequest ephemeral_key_request = 1;
SealedPassword sealed_password = 2;
}
}
message UnsealResponse {
oneof payload {
EphemeralKeyResponse ephemeral_key_response = 1;
UnsealResult unseal_result = 2;
}
}
message EphemeralKeyRequest {}
message EphemeralKeyResponse {
// Server's X25519 ephemeral public key (32 bytes)
bytes server_pubkey = 1;
// Unix timestamp when this key expires (60 seconds from generation)
int64 expires_at = 2;
}
message SealedPassword {
// Client's X25519 ephemeral public key (32 bytes)
bytes client_pubkey = 1;
// ChaCha20Poly1305 encrypted password (ciphertext + tag)
bytes encrypted_password = 2;
// 12-byte nonce for ChaCha20Poly1305
bytes nonce = 3;
}
message UnsealResult {
// Whether unseal was successful
bool success = 1;
// Error message if unseal failed
optional string error_message = 2;
}
service ArbiterService { service ArbiterService {
rpc Client(stream ClientRequest) returns (stream ClientResponse); rpc Client(stream ClientRequest) returns (stream ClientResponse);
rpc UserAgent(stream UserAgentRequest) returns (stream UserAgentResponse); rpc UserAgent(stream UserAgentRequest) returns (stream UserAgentResponse);

View File

@@ -1,17 +0,0 @@
syntax = "proto3";
package arbiter;
import "auth.proto";
message ClientRequest {
oneof payload {
arbiter.auth.ClientMessage auth_message = 1;
}
}
message ClientResponse {
oneof payload {
arbiter.auth.ServerMessage auth_message = 1;
}
}

View File

@@ -0,0 +1,46 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
syntax = "proto3";
package google.protobuf;
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
option java_package = "com.google.protobuf";
option java_outer_classname = "TimestampProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
// 9999-12-31T23:59:59Z inclusive.
int64 seconds = 1;
// Non-negative fractions of a second at nanosecond resolution. Negative
// second values with fractions must still have non-negative nanos values
// that count forward in time. Must be from 0 to 999,999,999
// inclusive.
int32 nanos = 2;
}

View File

@@ -2,24 +2,13 @@ syntax = "proto3";
package arbiter.unseal; package arbiter.unseal;
import "google/protobuf/empty.proto"; message UserAgentKeyRequest {}
message UnsealStart { message ServerKeyResponse {
bytes client_pubkey = 1; bytes pubkey = 1;
} }
message UserAgentSealedKey {
message UnsealStartResponse { bytes sealed_key = 1;
bytes server_pubkey = 1; bytes pubkey = 2;
} bytes nonce = 3;
message UnsealEncryptedKey {
bytes nonce = 1;
bytes ciphertext = 2;
bytes associated_data = 3;
}
enum UnsealResult {
UNSEAL_RESULT_UNSPECIFIED = 0;
UNSEAL_RESULT_SUCCESS = 1;
UNSEAL_RESULT_INVALID_KEY = 2;
UNSEAL_RESULT_UNBOOTSTRAPPED = 3;
} }

View File

@@ -1,21 +0,0 @@
syntax = "proto3";
package arbiter;
import "auth.proto";
import "unseal.proto";
message UserAgentRequest {
oneof payload {
arbiter.auth.ClientMessage auth_message = 1;
arbiter.unseal.UnsealStart unseal_start = 2;
arbiter.unseal.UnsealEncryptedKey unseal_encrypted_key = 3;
}
}
message UserAgentResponse {
oneof payload {
arbiter.auth.ServerMessage auth_message = 1;
arbiter.unseal.UnsealStartResponse unseal_start_response = 2;
arbiter.unseal.UnsealResult unseal_result = 3;
}
}

Binary file not shown.

388
server/Cargo.lock generated
View File

@@ -47,9 +47,9 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.101" version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]] [[package]]
name = "arbiter-client" name = "arbiter-client"
@@ -63,6 +63,9 @@ dependencies = [
"hex", "hex",
"kameo", "kameo",
"prost", "prost",
"prost-build",
"prost-types",
"serde_json",
"tokio", "tokio",
"tonic", "tonic",
"tonic-prost", "tonic-prost",
@@ -84,24 +87,21 @@ dependencies = [
"diesel_migrations", "diesel_migrations",
"ed25519-dalek", "ed25519-dalek",
"futures", "futures",
"insta", "hex",
"kameo", "kameo",
"memsafe", "memsafe",
"miette", "miette",
"rand", "rand",
"rcgen", "rcgen",
"restructed",
"rustls", "rustls",
"secrecy", "secrecy",
"smlang", "smlang",
"strum",
"test-log", "test-log",
"thiserror", "thiserror",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
"tonic", "tonic",
"tracing", "tracing",
"tracing-subscriber",
"x25519-dalek", "x25519-dalek",
"zeroize", "zeroize",
] ]
@@ -120,7 +120,6 @@ dependencies = [
"blake2", "blake2",
"cpufeatures 0.2.17", "cpufeatures 0.2.17",
"password-hash", "password-hash",
"zeroize",
] ]
[[package]] [[package]]
@@ -147,7 +146,7 @@ checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
"synstructure", "synstructure",
] ]
@@ -159,7 +158,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -170,7 +169,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -187,9 +186,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]] [[package]]
name = "aws-lc-rs" name = "aws-lc-rs"
version = "1.15.4" version = "1.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b7b6141e96a8c160799cc2d5adecd5cbbe5054cb8c7c4af53da0f83bb7ad256" checksum = "94bffc006df10ac2a68c83692d734a465f8ee6c5b384d8545a636f81d858f4bf"
dependencies = [ dependencies = [
"aws-lc-sys", "aws-lc-sys",
"untrusted 0.7.1", "untrusted 0.7.1",
@@ -198,9 +197,9 @@ dependencies = [
[[package]] [[package]]
name = "aws-lc-sys" name = "aws-lc-sys"
version = "0.37.1" version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b092fe214090261288111db7a2b2c2118e5a7f30dc2569f1732c4069a6840549" checksum = "4321e568ed89bb5a7d291a7f37997c2c0df89809d7b6d12062c81ddb54aa782e"
dependencies = [ dependencies = [
"cc", "cc",
"cmake", "cmake",
@@ -325,18 +324,18 @@ dependencies = [
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
version = "0.11.0" version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96eb4cdd6cf1b31d671e9efe75c5d1ec614776856cefbe109ca373554a6d514f" checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be"
dependencies = [ dependencies = [
"hybrid-array", "hybrid-array",
] ]
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.19.1" version = "3.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb"
[[package]] [[package]]
name = "bytes" name = "bytes"
@@ -399,9 +398,9 @@ dependencies = [
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.43" version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0"
dependencies = [ dependencies = [
"iana-time-zone", "iana-time-zone",
"js-sys", "js-sys",
@@ -431,18 +430,6 @@ dependencies = [
"cc", "cc",
] ]
[[package]]
name = "console"
version = "0.15.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
dependencies = [
"encode_unicode",
"libc",
"once_cell",
"windows-sys 0.59.0",
]
[[package]] [[package]]
name = "core-foundation-sys" name = "core-foundation-sys"
version = "0.8.7" version = "0.8.7"
@@ -495,9 +482,9 @@ dependencies = [
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.2.0" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "211f05e03c7d03754740fd9e585de910a095d6b99f8bcfffdef8319fa02a8331" checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710"
dependencies = [ dependencies = [
"hybrid-array", "hybrid-array",
] ]
@@ -526,7 +513,7 @@ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures 0.2.17", "cpufeatures 0.2.17",
"curve25519-dalek-derive", "curve25519-dalek-derive",
"digest 0.11.0", "digest 0.11.1",
"fiat-crypto 0.3.0", "fiat-crypto 0.3.0",
"rustc_version", "rustc_version",
"subtle", "subtle",
@@ -541,7 +528,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -565,7 +552,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"strsim", "strsim",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -576,7 +563,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
dependencies = [ dependencies = [
"darling_core", "darling_core",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -615,9 +602,9 @@ dependencies = [
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.5.6" version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc3dc5ad92c2e2d1c193bbbbdf2ea477cb81331de4f3103f267ca18368b988c4" checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c"
dependencies = [ dependencies = [
"powerfmt", "powerfmt",
] ]
@@ -663,7 +650,7 @@ dependencies = [
"dsl_auto_type", "dsl_auto_type",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -683,7 +670,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe2444076b48641147115697648dc743c2c00b61adade0f01ce67133c7babe8c" checksum = "fe2444076b48641147115697648dc743c2c00b61adade0f01ce67133c7babe8c"
dependencies = [ dependencies = [
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -699,12 +686,12 @@ dependencies = [
[[package]] [[package]]
name = "digest" name = "digest"
version = "0.11.0" version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8bf3682cdec91817be507e4aa104314898b95b84d74f3d43882210101a545b6" checksum = "285743a676ccb6b3e116bc14cc69319b957867930ae9c4822f8e0f54509d7243"
dependencies = [ dependencies = [
"block-buffer 0.11.0", "block-buffer 0.12.0",
"crypto-common 0.2.0", "crypto-common 0.2.1",
] ]
[[package]] [[package]]
@@ -715,7 +702,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -735,7 +722,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -779,12 +766,6 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
[[package]]
name = "encode_unicode"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
@@ -921,7 +902,7 @@ checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -982,19 +963,19 @@ checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"r-efi", "r-efi 5.3.0",
"wasip2", "wasip2",
] ]
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.4.1" version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"r-efi", "r-efi 6.0.0",
"rand_core 0.10.0", "rand_core 0.10.0",
"wasip2", "wasip2",
"wasip3", "wasip3",
@@ -1108,9 +1089,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]] [[package]]
name = "hybrid-array" name = "hybrid-array"
version = "0.4.7" version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1b229d73f5803b562cc26e4da0396c8610a4ee209f4fac8fa4f8d709166dc45" checksum = "8655f91cd07f2b9d0c24137bd650fe69617773435ee5ec83022377777ce65ef1"
dependencies = [ dependencies = [
"typenum", "typenum",
] ]
@@ -1228,18 +1209,6 @@ dependencies = [
"generic-array", "generic-array",
] ]
[[package]]
name = "insta"
version = "1.46.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4"
dependencies = [
"console",
"once_cell",
"similar",
"tempfile",
]
[[package]] [[package]]
name = "is_ci" name = "is_ci"
version = "1.2.0" version = "1.2.0"
@@ -1273,9 +1242,9 @@ dependencies = [
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.85" version = "0.3.91"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"wasm-bindgen", "wasm-bindgen",
@@ -1305,7 +1274,7 @@ dependencies = [
"heck", "heck",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -1322,9 +1291,9 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.182" version = "0.2.183"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
[[package]] [[package]]
name = "libsqlite3-sys" name = "libsqlite3-sys"
@@ -1338,9 +1307,9 @@ dependencies = [
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
version = "0.11.0" version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
[[package]] [[package]]
name = "lock_api" name = "lock_api"
@@ -1416,7 +1385,7 @@ checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -1489,15 +1458,6 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "num-bigint" name = "num-bigint"
version = "0.4.6" version = "0.4.6"
@@ -1564,9 +1524,9 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
[[package]] [[package]]
name = "owo-colors" name = "owo-colors"
version = "4.2.3" version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" checksum = "d211803b9b6b570f68772237e415a029d5a50c65d382910b879fb19d3271f94d"
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
@@ -1631,29 +1591,29 @@ dependencies = [
[[package]] [[package]]
name = "pin-project" name = "pin-project"
version = "1.1.10" version = "1.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517"
dependencies = [ dependencies = [
"pin-project-internal", "pin-project-internal",
] ]
[[package]] [[package]]
name = "pin-project-internal" name = "pin-project-internal"
version = "1.1.10" version = "1.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
name = "pin-project-lite" name = "pin-project-lite"
version = "0.2.16" version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
[[package]] [[package]]
name = "pin-utils" name = "pin-utils"
@@ -1697,31 +1657,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"syn 2.0.115", "syn 2.0.117",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn 1.0.109",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
] ]
[[package]] [[package]]
@@ -1760,7 +1696,7 @@ dependencies = [
"pulldown-cmark", "pulldown-cmark",
"pulldown-cmark-to-cmark", "pulldown-cmark-to-cmark",
"regex", "regex",
"syn 2.0.115", "syn 2.0.117",
"tempfile", "tempfile",
] ]
@@ -1774,7 +1710,7 @@ dependencies = [
"itertools", "itertools",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -1783,14 +1719,15 @@ version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7" checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
dependencies = [ dependencies = [
"chrono",
"prost", "prost",
] ]
[[package]] [[package]]
name = "pulldown-cmark" name = "pulldown-cmark"
version = "0.13.0" version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" checksum = "83c41efbf8f90ac44de7f3a868f0867851d261b56291732d0cbf7cceaaeb55a6"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"memchr", "memchr",
@@ -1808,9 +1745,9 @@ dependencies = [
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.44" version = "1.0.45"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
@@ -1821,6 +1758,12 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "r-efi"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf"
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.10.0" version = "0.10.0"
@@ -1828,7 +1771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8" checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8"
dependencies = [ dependencies = [
"chacha20 0.10.0", "chacha20 0.10.0",
"getrandom 0.4.1", "getrandom 0.4.2",
"rand_core 0.10.0", "rand_core 0.10.0",
] ]
@@ -1896,21 +1839,9 @@ dependencies = [
[[package]] [[package]]
name = "regex-syntax" name = "regex-syntax"
version = "0.8.9" version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
[[package]]
name = "restructed"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f6f6e863d7d9d318699737c043d560dce1ea3cb6f5c78e0a3f0d1f257c73dfc"
dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.115",
]
[[package]] [[package]]
name = "ring" name = "ring"
@@ -1962,9 +1893,9 @@ dependencies = [
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "1.1.3" version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"errno", "errno",
@@ -1975,9 +1906,9 @@ dependencies = [
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.36" version = "0.23.37"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4"
dependencies = [ dependencies = [
"aws-lc-rs", "aws-lc-rs",
"log", "log",
@@ -2072,7 +2003,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2105,7 +2036,7 @@ checksum = "7c5f3b1e2dc8aad28310d8410bd4d7e180eca65fca176c52ab00d364475d0024"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures 0.2.17", "cpufeatures 0.2.17",
"digest 0.11.0", "digest 0.11.1",
] ]
[[package]] [[package]]
@@ -2145,12 +2076,6 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
[[package]]
name = "similar"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.4.12" version = "0.4.12"
@@ -2186,12 +2111,12 @@ dependencies = [
[[package]] [[package]]
name = "socket2" name = "socket2"
version = "0.6.2" version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.60.2", "windows-sys 0.61.2",
] ]
[[package]] [[package]]
@@ -2218,27 +2143,6 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.115",
]
[[package]] [[package]]
name = "subtle" name = "subtle"
version = "2.6.1" version = "2.6.1"
@@ -2279,9 +2183,9 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.115" version = "2.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12" checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -2302,17 +2206,17 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
name = "tempfile" name = "tempfile"
version = "3.25.0" version = "3.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1" checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd"
dependencies = [ dependencies = [
"fastrand", "fastrand",
"getrandom 0.4.1", "getrandom 0.4.2",
"once_cell", "once_cell",
"rustix", "rustix",
"windows-sys 0.61.2", "windows-sys 0.61.2",
@@ -2346,7 +2250,7 @@ checksum = "be35209fd0781c5401458ab66e4f98accf63553e8fae7425503e92fdd319783b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2376,7 +2280,7 @@ checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2421,9 +2325,9 @@ dependencies = [
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.49.0" version = "1.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
dependencies = [ dependencies = [
"bytes", "bytes",
"libc", "libc",
@@ -2439,13 +2343,13 @@ dependencies = [
[[package]] [[package]]
name = "tokio-macros" name = "tokio-macros"
version = "2.6.0" version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2507,18 +2411,18 @@ dependencies = [
[[package]] [[package]]
name = "toml_parser" name = "toml_parser"
version = "1.0.8+spec-1.1.0" version = "1.0.9+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc" checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4"
dependencies = [ dependencies = [
"winnow", "winnow",
] ]
[[package]] [[package]]
name = "tonic" name = "tonic"
version = "0.14.4" version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f32a6f80051a4111560201420c7885d0082ba9efe2ab61875c587bb6b18b9a0" checksum = "fec7c61a0695dc1887c1b53952990f3ad2e3a31453e1f49f10e75424943a93ec"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"axum", "axum",
@@ -2548,21 +2452,21 @@ dependencies = [
[[package]] [[package]]
name = "tonic-build" name = "tonic-build"
version = "0.14.4" version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce6d8958ed3be404120ca43ffa0fb1e1fc7be214e96c8d33bd43a131b6eebc9e" checksum = "1882ac3bf5ef12877d7ed57aad87e75154c11931c2ba7e6cde5e22d63522c734"
dependencies = [ dependencies = [
"prettyplease", "prettyplease",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
name = "tonic-prost" name = "tonic-prost"
version = "0.14.4" version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f86539c0089bfd09b1f8c0ab0239d80392af74c21bc9e0f15e1b4aca4c1647f" checksum = "a55376a0bbaa4975a3f10d009ad763d8f4108f067c7c2e74f3001fb49778d309"
dependencies = [ dependencies = [
"bytes", "bytes",
"prost", "prost",
@@ -2571,16 +2475,16 @@ dependencies = [
[[package]] [[package]]
name = "tonic-prost-build" name = "tonic-prost-build"
version = "0.14.4" version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65873ace111e90344b8973e94a1fc817c924473affff24629281f90daed1cd2e" checksum = "f3144df636917574672e93d0f56d7edec49f90305749c668df5101751bb8f95a"
dependencies = [ dependencies = [
"prettyplease", "prettyplease",
"proc-macro2", "proc-macro2",
"prost-build", "prost-build",
"prost-types", "prost-types",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
"tempfile", "tempfile",
"tonic-build", "tonic-build",
] ]
@@ -2635,7 +2539,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2645,18 +2549,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
] ]
[[package]] [[package]]
@@ -2666,15 +2558,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [ dependencies = [
"matchers", "matchers",
"nu-ansi-term",
"once_cell", "once_cell",
"regex-automata", "regex-automata",
"sharded-slab", "sharded-slab",
"smallvec",
"thread_local", "thread_local",
"tracing", "tracing",
"tracing-core", "tracing-core",
"tracing-log",
] ]
[[package]] [[package]]
@@ -2697,9 +2586,9 @@ checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142"
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.23" version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e" checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
[[package]] [[package]]
name = "unicode-linebreak" name = "unicode-linebreak"
@@ -2749,20 +2638,14 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]] [[package]]
name = "uuid" name = "uuid"
version = "1.21.0" version = "1.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37"
dependencies = [ dependencies = [
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "valuable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]] [[package]]
name = "vcpkg" name = "vcpkg"
version = "0.2.15" version = "0.2.15"
@@ -2810,9 +2693,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen" name = "wasm-bindgen"
version = "0.2.108" version = "0.2.114"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"once_cell", "once_cell",
@@ -2823,9 +2706,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.108" version = "0.2.114"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6"
dependencies = [ dependencies = [
"quote", "quote",
"wasm-bindgen-macro-support", "wasm-bindgen-macro-support",
@@ -2833,22 +2716,22 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro-support" name = "wasm-bindgen-macro-support"
version = "0.2.108" version = "0.2.114"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3"
dependencies = [ dependencies = [
"bumpalo", "bumpalo",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
[[package]] [[package]]
name = "wasm-bindgen-shared" name = "wasm-bindgen-shared"
version = "0.2.108" version = "0.2.114"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@@ -2930,7 +2813,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2941,7 +2824,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]
@@ -2977,15 +2860,6 @@ dependencies = [
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
]
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.60.2" version = "0.60.2"
@@ -3135,9 +3009,9 @@ checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.7.14" version = "0.7.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945"
[[package]] [[package]]
name = "wit-bindgen" name = "wit-bindgen"
@@ -3169,7 +3043,7 @@ dependencies = [
"heck", "heck",
"indexmap", "indexmap",
"prettyplease", "prettyplease",
"syn 2.0.115", "syn 2.0.117",
"wasm-metadata", "wasm-metadata",
"wit-bindgen-core", "wit-bindgen-core",
"wit-component", "wit-component",
@@ -3185,7 +3059,7 @@ dependencies = [
"prettyplease", "prettyplease",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
"wit-bindgen-core", "wit-bindgen-core",
"wit-bindgen-rust", "wit-bindgen-rust",
] ]
@@ -3283,7 +3157,7 @@ checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.115", "syn 2.0.117",
] ]
[[package]] [[package]]

View File

@@ -23,3 +23,4 @@ async-trait = "0.1.89"
futures = "0.3.31" futures = "0.3.31"
tokio-stream = { version = "0.1.18", features = ["full"] } tokio-stream = { version = "0.1.18", features = ["full"] }
kameo = "0.19.2" kameo = "0.19.2"
prost-types = { version = "0.14.3", features = ["chrono"] }

Binary file not shown.

View File

@@ -12,8 +12,11 @@ hex = "0.4.3"
tonic-prost = "0.14.3" tonic-prost = "0.14.3"
prost = "0.14.3" prost = "0.14.3"
kameo.workspace = true kameo.workspace = true
prost-types.workspace = true
[build-dependencies] [build-dependencies]
prost-build = "0.14.3"
serde_json = "1"
tonic-prost-build = "0.14.3" tonic-prost-build = "0.14.3"

View File

@@ -1,21 +1,15 @@
use tonic_prost_build::configure;
static PROTOBUF_DIR: &str = "../../../protobufs"; static PROTOBUF_DIR: &str = "../../../protobufs";
fn main() -> Result<(), Box<dyn std::error::Error>> { fn main() -> Result<(), Box<dyn std::error::Error>> {
let proto_files = vec![
format!("{}/arbiter.proto", PROTOBUF_DIR),
format!("{}/auth.proto", PROTOBUF_DIR),
];
println!("cargo::rerun-if-changed={PROTOBUF_DIR}"); // Компилируем protobuf (tonic-prost-build автоматически использует prost_types для google.protobuf)
tonic_prost_build::configure()
configure()
.message_attribute(".", "#[derive(::kameo::Reply)]") .message_attribute(".", "#[derive(::kameo::Reply)]")
.compile_protos( .compile_protos(&proto_files, &[PROTOBUF_DIR.to_string()])?;
&[
format!("{}/arbiter.proto", PROTOBUF_DIR),
format!("{}/auth.proto", PROTOBUF_DIR),
],
&[PROTOBUF_DIR.to_string()],
)
.unwrap();
Ok(()) Ok(())
} }

View File

@@ -6,17 +6,14 @@ pub mod proto {
pub mod auth { pub mod auth {
tonic::include_proto!("arbiter.auth"); tonic::include_proto!("arbiter.auth");
} }
pub mod unseal {
tonic::include_proto!("arbiter.unseal");
}
} }
pub mod transport; pub mod transport;
pub static BOOTSTRAP_TOKEN_PATH: &'static str = "bootstrap_token"; pub static BOOTSTRAP_TOKEN_PATH: &str = "bootstrap_token";
pub fn home_path() -> Result<std::path::PathBuf, std::io::Error> { pub fn home_path() -> Result<std::path::PathBuf, std::io::Error> {
static ARBITER_HOME: &'static str = ".arbiter"; static ARBITER_HOME: &str = ".arbiter";
let home_dir = std::env::home_dir().ok_or(std::io::Error::new( let home_dir = std::env::home_dir().ok_or(std::io::Error::new(
std::io::ErrorKind::PermissionDenied, std::io::ErrorKind::PermissionDenied,
"can not get home directory", "can not get home directory",

Binary file not shown.

View File

@@ -5,7 +5,13 @@ edition = "2024"
repository = "https://git.markettakers.org/MarketTakers/arbiter" repository = "https://git.markettakers.org/MarketTakers/arbiter"
[dependencies] [dependencies]
diesel = { version = "2.3.6", features = ["chrono", "returning_clauses_for_sqlite_3_35", "serde_json", "time", "uuid"] } diesel = { version = "2.3.6", features = [
"sqlite",
"uuid",
"time",
"chrono",
"serde_json",
] }
diesel-async = { version = "0.7.4", features = [ diesel-async = { version = "0.7.4", features = [
"bb8", "bb8",
"migrations", "migrations",
@@ -15,7 +21,6 @@ diesel-async = { version = "0.7.4", features = [
ed25519-dalek.workspace = true ed25519-dalek.workspace = true
arbiter-proto.path = "../arbiter-proto" arbiter-proto.path = "../arbiter-proto"
tracing.workspace = true tracing.workspace = true
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tonic.workspace = true tonic.workspace = true
tokio.workspace = true tokio.workspace = true
rustls.workspace = true rustls.workspace = true
@@ -38,13 +43,11 @@ rcgen = { version = "0.14.7", features = [
chrono.workspace = true chrono.workspace = true
memsafe = "0.4.0" memsafe = "0.4.0"
zeroize = { version = "1.8.2", features = ["std", "simd"] } zeroize = { version = "1.8.2", features = ["std", "simd"] }
argon2 = { version = "0.5", features = ["std"] }
kameo.workspace = true kameo.workspace = true
x25519-dalek = { version = "2.0.1", features = ["getrandom"] } hex = "0.4.3"
chacha20poly1305 = { version = "0.10.1", features = ["std"] } chacha20poly1305 = "0.10.1"
argon2 = { version = "0.5.3", features = ["zeroize"] } x25519-dalek = { version = "2.0", features = ["static_secrets"] }
restructed = "0.2.2"
strum = { version = "0.27.2", features = ["derive"] }
[dev-dependencies] [dev-dependencies]
insta = "1.46.3"
test-log = { version = "0.2", default-features = false, features = ["trace"] } test-log = { version = "0.2", default-features = false, features = ["trace"] }

View File

@@ -0,0 +1,11 @@
-- Rollback TLS rotation tables
-- Удалить добавленную колонку из arbiter_settings
ALTER TABLE arbiter_settings DROP COLUMN current_cert_id;
-- Удалить таблицы в обратном порядке
DROP TABLE IF EXISTS tls_rotation_history;
DROP TABLE IF EXISTS rotation_client_acks;
DROP TABLE IF EXISTS tls_rotation_state;
DROP INDEX IF EXISTS idx_tls_certificates_active;
DROP TABLE IF EXISTS tls_certificates;

View File

@@ -0,0 +1,57 @@
-- История всех сертификатов
CREATE TABLE IF NOT EXISTS tls_certificates (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
cert BLOB NOT NULL, -- DER-encoded
cert_key BLOB NOT NULL, -- PEM-encoded
not_before INTEGER NOT NULL, -- Unix timestamp
not_after INTEGER NOT NULL, -- Unix timestamp
created_at INTEGER NOT NULL DEFAULT(unixepoch('now')),
is_active BOOLEAN NOT NULL DEFAULT 0 -- Только один active=1
) STRICT;
CREATE INDEX idx_tls_certificates_active ON tls_certificates(is_active, not_after);
-- Tracking процесса ротации
CREATE TABLE IF NOT EXISTS tls_rotation_state (
id INTEGER NOT NULL PRIMARY KEY CHECK(id = 1), -- Singleton
state TEXT NOT NULL DEFAULT('normal') CHECK(state IN ('normal', 'initiated', 'waiting_acks', 'ready')),
new_cert_id INTEGER REFERENCES tls_certificates(id),
initiated_at INTEGER,
timeout_at INTEGER -- Таймаут для ожидания ACKs (initiated_at + 7 дней)
) STRICT;
-- Tracking ACKs от клиентов
CREATE TABLE IF NOT EXISTS rotation_client_acks (
rotation_id INTEGER NOT NULL, -- Ссылка на new_cert_id
client_key TEXT NOT NULL, -- Публичный ключ клиента (hex)
ack_received_at INTEGER NOT NULL DEFAULT(unixepoch('now')),
PRIMARY KEY (rotation_id, client_key)
) STRICT;
-- Audit trail событий ротации
CREATE TABLE IF NOT EXISTS tls_rotation_history (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
cert_id INTEGER NOT NULL REFERENCES tls_certificates(id),
event_type TEXT NOT NULL CHECK(event_type IN ('created', 'rotation_initiated', 'acks_complete', 'activated', 'timeout')),
timestamp INTEGER NOT NULL DEFAULT(unixepoch('now')),
details TEXT -- JSON с доп. информацией
) STRICT;
-- Миграция существующего сертификата
INSERT INTO tls_certificates (id, cert, cert_key, not_before, not_after, is_active, created_at)
SELECT
1,
cert,
cert_key,
unixepoch('now') as not_before,
unixepoch('now') + (90 * 24 * 60 * 60) as not_after, -- 90 дней
1 as is_active,
unixepoch('now')
FROM arbiter_settings WHERE id = 1;
-- Инициализация rotation_state
INSERT INTO tls_rotation_state (id, state) VALUES (1, 'normal');
-- Добавить ссылку на текущий сертификат
ALTER TABLE arbiter_settings ADD COLUMN current_cert_id INTEGER REFERENCES tls_certificates(id);
UPDATE arbiter_settings SET current_cert_id = 1 WHERE id = 1;

View File

@@ -1,40 +1,22 @@
create table if not exists root_key_history (
id INTEGER not null PRIMARY KEY,
-- root key stored as aead encrypted artifact, with only difference that it's decrypted by unseal key (derived from user password)
root_key_encryption_nonce blob not null default(1), -- if re-encrypted, this should be incremented. Used for encrypting root key
data_encryption_nonce blob not null default(1), -- nonce used for encrypting with key itself
ciphertext blob not null,
tag blob not null,
schema_version integer not null default(1), -- server would need to reencrypt, because this means that we have changed algorithm
salt blob not null -- for key deriviation
) STRICT;
create table if not exists aead_encrypted ( create table if not exists aead_encrypted (
id INTEGER not null PRIMARY KEY, id INTEGER not null PRIMARY KEY,
current_nonce blob not null default(1), -- if re-encrypted, this should be incremented current_nonce integer not null default(1), -- if re-encrypted, this should be incremented
ciphertext blob not null, ciphertext blob not null,
tag blob not null, tag blob not null,
schema_version integer not null default(1), -- server would need to reencrypt, because this means that we have changed algorithm schema_version integer not null default(1) -- server would need to reencrypt, because this means that we have changed algorithm
associated_root_key_id integer not null references root_key_history (id) on delete RESTRICT,
created_at integer not null default(unixepoch ('now'))
) STRICT; ) STRICT;
create unique index if not exists uniq_nonce_per_root_key on aead_encrypted (
current_nonce,
associated_root_key_id
);
-- This is a singleton -- This is a singleton
create table if not exists arbiter_settings ( create table if not exists arbiter_settings (
id INTEGER not null PRIMARY KEY CHECK (id = 1), -- singleton row, id must be 1 id INTEGER not null PRIMARY KEY CHECK (id = 1), -- singleton row, id must be 1
root_key_id integer references root_key_history (id) on delete RESTRICT, -- if null, means wasn't bootstrapped yet root_key_id integer references aead_encrypted (id) on delete RESTRICT, -- if null, means wasn't bootstrapped yet
cert_key blob not null, cert_key blob not null,
cert blob not null cert blob not null
) STRICT; ) STRICT;
create table if not exists useragent_client ( create table if not exists useragent_client (
id integer not null primary key, id integer not null primary key,
nonce integer not null default(1), -- used for auth challenge nonce integer not null default (1), -- used for auth challenge
public_key blob not null, public_key blob not null,
created_at integer not null default(unixepoch ('now')), created_at integer not null default(unixepoch ('now')),
updated_at integer not null default(unixepoch ('now')) updated_at integer not null default(unixepoch ('now'))
@@ -42,7 +24,7 @@ create table if not exists useragent_client (
create table if not exists program_client ( create table if not exists program_client (
id integer not null primary key, id integer not null primary key,
nonce integer not null default(1), -- used for auth challenge nonce integer not null default (1), -- used for auth challenge
public_key blob not null, public_key blob not null,
created_at integer not null default(unixepoch ('now')), created_at integer not null default(unixepoch ('now')),
updated_at integer not null default(unixepoch ('now')) updated_at integer not null default(unixepoch ('now'))

View File

@@ -0,0 +1,2 @@
-- Remove argon2_salt column
ALTER TABLE aead_encrypted DROP COLUMN argon2_salt;

View File

@@ -0,0 +1,2 @@
-- Add argon2_salt column to store password derivation salt
ALTER TABLE aead_encrypted ADD COLUMN argon2_salt TEXT;

Binary file not shown.

View File

@@ -1,4 +1,2 @@
pub mod user_agent; pub mod user_agent;
pub mod client; pub mod client;
pub(crate) mod bootstrap;
pub(crate) mod keyholder;

View File

@@ -1,939 +0,0 @@
use diesel::{
ExpressionMethods as _, OptionalExtension, QueryDsl, SelectableHelper,
dsl::{insert_into, update},
};
use diesel_async::{AsyncConnection, RunQueryDsl};
use kameo::{Actor, Reply, messages};
use memsafe::MemSafe;
use strum::{EnumDiscriminants, IntoDiscriminant};
use tracing::{error, info};
use crate::{
actors::keyholder::v1::{KeyCell, Nonce},
db::{
self,
models::{self, RootKeyHistory},
schema::{self},
},
};
pub mod v1;
#[derive(Default, EnumDiscriminants)]
#[strum_discriminants(derive(Reply), vis(pub))]
enum State {
#[default]
Unbootstrapped,
Sealed {
root_key_history_id: i32,
},
Unsealed {
root_key_history_id: i32,
root_key: KeyCell,
},
}
#[derive(Debug, thiserror::Error, miette::Diagnostic)]
pub enum Error {
#[error("Keyholder is already bootstrapped")]
#[diagnostic(code(arbiter::keyholder::already_bootstrapped))]
AlreadyBootstrapped,
#[error("Keyholder is not bootstrapped")]
#[diagnostic(code(arbiter::keyholder::not_bootstrapped))]
NotBootstrapped,
#[error("Invalid key provided")]
#[diagnostic(code(arbiter::keyholder::invalid_key))]
InvalidKey,
#[error("Requested aead entry not found")]
#[diagnostic(code(arbiter::keyholder::aead_not_found))]
NotFound,
#[error("Encryption error: {0}")]
#[diagnostic(code(arbiter::keyholder::encryption_error))]
Encryption(#[from] chacha20poly1305::aead::Error),
#[error("Database error: {0}")]
#[diagnostic(code(arbiter::keyholder::database_error))]
DatabaseConnection(#[from] db::PoolError),
#[error("Database transaction error: {0}")]
#[diagnostic(code(arbiter::keyholder::database_transaction_error))]
DatabaseTransaction(#[from] diesel::result::Error),
#[error("Broken database")]
#[diagnostic(code(arbiter::keyholder::broken_database))]
BrokenDatabase,
}
/// Manages vault root key and tracks current state of the vault (bootstrapped/unbootstrapped, sealed/unsealed).
/// Provides API for encrypting and decrypting data using the vault root key.
/// Abstraction over database to make sure nonces are never reused and encryption keys are never exposed in plaintext outside of this actor.
#[derive(Actor)]
pub struct KeyHolder {
db: db::DatabasePool,
state: State,
}
#[messages]
impl KeyHolder {
pub async fn new(db: db::DatabasePool) -> Result<Self, Error> {
let state = {
let mut conn = db.get().await?;
let (root_key_history,) = schema::arbiter_settings::table
.left_join(schema::root_key_history::table)
.select((Option::<RootKeyHistory>::as_select(),))
.get_result::<(Option<RootKeyHistory>,)>(&mut conn)
.await?;
match root_key_history {
Some(root_key_history) => State::Sealed {
root_key_history_id: root_key_history.id,
},
None => State::Unbootstrapped,
}
};
Ok(Self { db, state })
}
// Exclusive transaction to avoid race condtions if multiple keyholders write
// additional layer of protection against nonce-reuse
async fn get_new_nonce(pool: &db::DatabasePool, root_key_id: i32) -> Result<Nonce, Error> {
let mut conn = pool.get().await?;
let nonce = conn
.exclusive_transaction(|conn| {
Box::pin(async move {
let current_nonce: Vec<u8> = schema::root_key_history::table
.filter(schema::root_key_history::id.eq(root_key_id))
.select(schema::root_key_history::data_encryption_nonce)
.first(conn)
.await?;
let mut nonce =
v1::Nonce::try_from(current_nonce.as_slice()).map_err(|_| {
error!(
"Broken database: invalid nonce for root key history id={}",
root_key_id
);
Error::BrokenDatabase
})?;
nonce.increment();
update(schema::root_key_history::table)
.filter(schema::root_key_history::id.eq(root_key_id))
.set(schema::root_key_history::data_encryption_nonce.eq(nonce.to_vec()))
.execute(conn)
.await?;
Result::<_, Error>::Ok(nonce)
})
})
.await?;
Ok(nonce)
}
#[message]
pub async fn bootstrap(&mut self, seal_key_raw: MemSafe<Vec<u8>>) -> Result<(), Error> {
if !matches!(self.state, State::Unbootstrapped) {
return Err(Error::AlreadyBootstrapped);
}
let salt = v1::generate_salt();
let mut seal_key = v1::derive_seal_key(seal_key_raw, &salt);
let mut root_key = KeyCell::new_secure_random();
// Zero nonces are fine because they are one-time
let root_key_nonce = v1::Nonce::default();
let data_encryption_nonce = v1::Nonce::default();
let root_key_ciphertext: Vec<u8> = {
let root_key_reader = root_key.0.read().unwrap();
let root_key_reader = root_key_reader.as_slice();
seal_key
.encrypt(&root_key_nonce, v1::ROOT_KEY_TAG, root_key_reader)
.map_err(|err| {
error!(?err, "Fatal bootstrap error");
Error::Encryption(err)
})?
};
let mut conn = self.db.get().await?;
let data_encryption_nonce_bytes = data_encryption_nonce.to_vec();
let root_key_history_id = conn
.transaction(|conn| {
Box::pin(async move {
let root_key_history_id: i32 = insert_into(schema::root_key_history::table)
.values(&models::NewRootKeyHistory {
ciphertext: root_key_ciphertext,
tag: v1::ROOT_KEY_TAG.to_vec(),
root_key_encryption_nonce: root_key_nonce.to_vec(),
data_encryption_nonce: data_encryption_nonce_bytes,
schema_version: 1,
salt: salt.to_vec(),
})
.returning(schema::root_key_history::id)
.get_result(conn)
.await?;
update(schema::arbiter_settings::table)
.set(schema::arbiter_settings::root_key_id.eq(root_key_history_id))
.execute(conn)
.await?;
Result::<_, diesel::result::Error>::Ok(root_key_history_id)
})
})
.await?;
self.state = State::Unsealed {
root_key,
root_key_history_id,
};
info!("Keyholder bootstrapped successfully");
Ok(())
}
#[message]
pub async fn try_unseal(&mut self, seal_key_raw: MemSafe<Vec<u8>>) -> Result<(), Error> {
let State::Sealed {
root_key_history_id,
} = &self.state
else {
return Err(Error::NotBootstrapped);
};
// We don't want to hold connection while doing expensive KDF work
let current_key = {
let mut conn = self.db.get().await?;
schema::root_key_history::table
.filter(schema::root_key_history::id.eq(*root_key_history_id))
.select(schema::root_key_history::data_encryption_nonce )
.select(RootKeyHistory::as_select() )
.first(&mut conn)
.await?
};
let salt = &current_key.salt;
let salt = v1::Salt::try_from(salt.as_slice()).map_err(|_| {
error!("Broken database: invalid salt for root key");
Error::BrokenDatabase
})?;
let mut seal_key = v1::derive_seal_key(seal_key_raw, &salt);
let mut root_key = MemSafe::new(current_key.ciphertext.clone()).unwrap();
let nonce = v1::Nonce::try_from(current_key.root_key_encryption_nonce.as_slice()).map_err(
|_| {
error!("Broken database: invalid nonce for root key");
Error::BrokenDatabase
},
)?;
seal_key
.decrypt_in_place(&nonce, v1::ROOT_KEY_TAG, &mut root_key)
.map_err(|err| {
error!(?err, "Failed to unseal root key: invalid seal key");
Error::InvalidKey
})?;
self.state = State::Unsealed {
root_key_history_id: current_key.id,
root_key: v1::KeyCell::try_from(root_key).map_err(|err| {
error!(?err, "Broken database: invalid encryption key size");
Error::BrokenDatabase
})?,
};
info!("Keyholder unsealed successfully");
Ok(())
}
// Decrypts the `aead_encrypted` entry with the given ID and returns the plaintext
#[message]
pub async fn decrypt(&mut self, aead_id: i32) -> Result<MemSafe<Vec<u8>>, Error> {
let State::Unsealed { root_key, .. } = &mut self.state else {
return Err(Error::NotBootstrapped);
};
let row: models::AeadEncrypted = {
let mut conn = self.db.get().await?;
schema::aead_encrypted::table
.select(models::AeadEncrypted::as_select())
.filter(schema::aead_encrypted::id.eq(aead_id))
.first(&mut conn)
.await
.optional()?
.ok_or(Error::NotFound)?
};
let nonce = v1::Nonce::try_from(row.current_nonce.as_slice()).map_err(|_| {
error!(
"Broken database: invalid nonce for aead_encrypted id={}",
aead_id
);
Error::BrokenDatabase
})?;
let mut output = MemSafe::new(row.ciphertext).unwrap();
root_key.decrypt_in_place(&nonce, v1::TAG, &mut output)?;
Ok(output)
}
// Creates a new `aead_encrypted` entry in the database and returns its ID.
//
// The plaintext buffer is encrypted in place under the root key and the
// resulting ciphertext is moved into the inserted row. A fresh nonce is
// allocated per call via `get_new_nonce`.
#[message]
pub async fn create_new(&mut self, mut plaintext: MemSafe<Vec<u8>>) -> Result<i32, Error> {
let State::Unsealed {
root_key,
root_key_history_id,
} = &mut self.state
else {
return Err(Error::NotBootstrapped);
};
// Order matters here - `get_new_nonce` acquires connection, so we need to call it before next acquire
// Borrow checker note: &mut borrow a few lines above is disjoint from this field
let nonce = Self::get_new_nonce(&self.db, *root_key_history_id).await?;
let mut ciphertext_buffer = plaintext.write().unwrap();
let ciphertext_buffer: &mut Vec<u8> = ciphertext_buffer.as_mut();
// After this call the buffer holds ciphertext instead of plaintext.
root_key.encrypt_in_place(&nonce, v1::TAG, &mut *ciphertext_buffer)?;
// Move the bytes out of the MemSafe buffer for insertion.
let ciphertext = std::mem::take(ciphertext_buffer);
let mut conn = self.db.get().await?;
let aead_id: i32 = insert_into(schema::aead_encrypted::table)
.values(&models::NewAeadEncrypted {
ciphertext,
tag: v1::TAG.to_vec(),
current_nonce: nonce.to_vec(),
schema_version: 1,
associated_root_key_id: *root_key_history_id,
created_at: chrono::Utc::now().timestamp() as i32,
})
.returning(schema::aead_encrypted::id)
.get_result(&mut conn)
.await?;
Ok(aead_id)
}
// Returns the current lifecycle state discriminant without exposing any key
// material.
#[message]
pub fn get_state(&self) -> StateDiscriminants {
self.state.discriminant()
}
}
#[cfg(test)]
mod tests {
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use diesel::dsl::{insert_into, sql_query, update};
use diesel_async::RunQueryDsl;
use futures::stream::TryUnfold;
use kameo::actor::{ActorRef, Spawn as _};
use memsafe::MemSafe;
use tokio::sync::Mutex;
use tokio::task::JoinSet;
use crate::db::{self, models::ArbiterSetting};
use super::*;
// Inserts the singleton `arbiter_settings` row that bootstrapping requires.
async fn seed_settings(pool: &db::DatabasePool) {
    let mut connection = pool.get().await.unwrap();
    let settings_row = ArbiterSetting {
        id: 1,
        root_key_id: None,
        cert_key: vec![],
        cert: vec![],
    };
    insert_into(schema::arbiter_settings::table)
        .values(&settings_row)
        .execute(&mut connection)
        .await
        .unwrap();
}
// Builds a KeyHolder over a freshly seeded database and bootstraps it with the
// shared test seal key.
async fn bootstrapped_actor(db: &db::DatabasePool) -> KeyHolder {
    seed_settings(db).await;
    let key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
    let mut holder = KeyHolder::new(db.clone()).await.unwrap();
    holder.bootstrap(key).await.unwrap();
    holder
}
// Fires `count` CreateNew requests at the actor in parallel and returns the
// resulting (row id, plaintext) pairs in completion order.
async fn write_concurrently(
    actor: ActorRef<KeyHolder>,
    prefix: &'static str,
    count: usize,
) -> Vec<(i32, Vec<u8>)> {
    let mut tasks = JoinSet::new();
    for i in 0..count {
        let handle = actor.clone();
        tasks.spawn(async move {
            let data = format!("{prefix}-{i}").into_bytes();
            let message = CreateNew {
                plaintext: MemSafe::new(data.clone()).unwrap(),
            };
            let id = handle.ask(message).await.unwrap();
            (id, data)
        });
    }
    let mut results = Vec::with_capacity(count);
    while let Some(joined) = tasks.join_next().await {
        results.push(joined.unwrap());
    }
    results
}
#[tokio::test]
#[test_log::test]
// Bootstrap must flip the state to Unsealed and persist a v1 root-key row.
async fn test_bootstrap() {
let db = db::create_test_pool().await;
seed_settings(&db).await;
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
assert!(matches!(actor.state, State::Unbootstrapped));
let seal_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
actor.bootstrap(seal_key).await.unwrap();
assert!(matches!(actor.state, State::Unsealed { .. }));
let mut conn = db.get().await.unwrap();
let row: models::RootKeyHistory = schema::root_key_history::table
.select(models::RootKeyHistory::as_select())
.first(&mut conn)
.await
.unwrap();
assert_eq!(row.schema_version, 1);
assert_eq!(row.tag, v1::ROOT_KEY_TAG);
assert!(!row.ciphertext.is_empty());
assert!(!row.salt.is_empty());
// A fresh root key starts counting from the all-zero nonce.
assert_eq!(row.data_encryption_nonce, v1::Nonce::default().to_vec());
}
#[tokio::test]
#[test_log::test]
async fn test_bootstrap_rejects_double() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let seal_key2 = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
let err = actor.bootstrap(seal_key2).await.unwrap_err();
assert!(matches!(err, Error::AlreadyBootstrapped));
}
#[tokio::test]
#[test_log::test]
// Encrypt-then-decrypt through the actor must return the original plaintext.
async fn test_create_decrypt_roundtrip() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let plaintext = b"hello arbiter";
let aead_id = actor
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
.await
.unwrap();
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
let decrypted = decrypted.read().unwrap();
assert_eq!(*decrypted, plaintext);
}
#[tokio::test]
#[test_log::test]
async fn test_create_new_before_bootstrap_fails() {
let db = db::create_test_pool().await;
seed_settings(&db).await;
let mut actor = KeyHolder::new(db).await.unwrap();
let err = actor
.create_new(MemSafe::new(b"data".to_vec()).unwrap())
.await
.unwrap_err();
assert!(matches!(err, Error::NotBootstrapped));
}
#[tokio::test]
#[test_log::test]
async fn test_decrypt_before_bootstrap_fails() {
let db = db::create_test_pool().await;
seed_settings(&db).await;
let mut actor = KeyHolder::new(db).await.unwrap();
let err = actor.decrypt(1).await.unwrap_err();
assert!(matches!(err, Error::NotBootstrapped));
}
#[tokio::test]
#[test_log::test]
async fn test_decrypt_nonexistent_returns_not_found() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let err = actor.decrypt(9999).await.unwrap_err();
assert!(matches!(err, Error::NotFound));
}
#[tokio::test]
#[test_log::test]
// A fresh actor over an already-bootstrapped database must start Sealed.
async fn test_new_restores_sealed_state() {
    let db = db::create_test_pool().await;
    drop(bootstrapped_actor(&db).await);
    let reopened = KeyHolder::new(db).await.unwrap();
    assert!(matches!(reopened.state, State::Sealed { .. }));
}
#[tokio::test]
#[test_log::test]
// Sequential writes must produce unique, strictly incrementing nonces, and the
// root_key_history row must always record the latest nonce handed out.
async fn test_nonce_never_reused() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let n = 5;
let mut ids = Vec::with_capacity(n);
for i in 0..n {
let id = actor
.create_new(MemSafe::new(format!("secret {i}").into_bytes()).unwrap())
.await
.unwrap();
ids.push(id);
}
// read all stored nonces from DB
let mut conn = db.get().await.unwrap();
let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
.select(models::AeadEncrypted::as_select())
.load(&mut conn)
.await
.unwrap();
assert_eq!(rows.len(), n);
let nonces: Vec<&Vec<u8>> = rows.iter().map(|r| &r.current_nonce).collect();
let unique: HashSet<&Vec<u8>> = nonces.iter().copied().collect();
assert_eq!(nonces.len(), unique.len(), "all nonces must be unique");
// verify nonces are sequential increments from 1
for (i, row) in rows.iter().enumerate() {
let mut expected = v1::Nonce::default();
for _ in 0..=i {
expected.increment();
}
assert_eq!(row.current_nonce, expected.to_vec(), "nonce {i} mismatch");
}
// verify data_encryption_nonce on root_key_history tracks the latest nonce
let root_row: models::RootKeyHistory = schema::root_key_history::table
.select(models::RootKeyHistory::as_select())
.first(&mut conn)
.await
.unwrap();
let last_nonce = &rows.last().unwrap().current_nonce;
assert_eq!(
&root_row.data_encryption_nonce, last_nonce,
"root_key_history must track the latest nonce"
);
}
#[tokio::test]
#[test_log::test]
// Data written before a "restart" must decrypt after unsealing with the same password.
async fn test_unseal_correct_password() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let plaintext = b"survive a restart";
let aead_id = actor
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
.await
.unwrap();
// Simulate a restart: a new actor over the same DB comes up Sealed.
drop(actor);
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
assert!(matches!(actor.state, State::Sealed { .. }));
let seal_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
actor.try_unseal(seal_key).await.unwrap();
assert!(matches!(actor.state, State::Unsealed { .. }));
// previously encrypted data is still decryptable
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
assert_eq!(*decrypted.read().unwrap(), plaintext);
}
#[tokio::test]
#[test_log::test]
// A wrong password must leave the actor Sealed; a subsequent correct password
// must still succeed and give access to previously written data.
async fn test_unseal_wrong_then_correct_password() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let plaintext = b"important data";
let aead_id = actor
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
.await
.unwrap();
drop(actor);
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
assert!(matches!(actor.state, State::Sealed { .. }));
// wrong password
let bad_key = MemSafe::new(b"wrong-password".to_vec()).unwrap();
let err = actor.try_unseal(bad_key).await.unwrap_err();
assert!(matches!(err, Error::InvalidKey));
assert!(
matches!(actor.state, State::Sealed { .. }),
"state must remain Sealed after failed attempt"
);
// correct password
let good_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
actor.try_unseal(good_key).await.unwrap();
assert!(matches!(actor.state, State::Unsealed { .. }));
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
assert_eq!(*decrypted.read().unwrap(), plaintext);
}
#[tokio::test]
#[test_log::test]
// Identical plaintexts must not produce identical ciphertexts (nonce uniqueness).
async fn test_ciphertext_differs_across_entries() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let plaintext = b"same content";
let id1 = actor
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
.await
.unwrap();
let id2 = actor
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
.await
.unwrap();
// different nonces => different ciphertext, even for identical plaintext
let mut conn = db.get().await.unwrap();
let row1: models::AeadEncrypted = schema::aead_encrypted::table
.filter(schema::aead_encrypted::id.eq(id1))
.select(models::AeadEncrypted::as_select())
.first(&mut conn)
.await
.unwrap();
let row2: models::AeadEncrypted = schema::aead_encrypted::table
.filter(schema::aead_encrypted::id.eq(id2))
.select(models::AeadEncrypted::as_select())
.first(&mut conn)
.await
.unwrap();
assert_ne!(row1.ciphertext, row2.ciphertext);
// but both decrypt to the same plaintext
let mut d1 = actor.decrypt(id1).await.unwrap();
let mut d2 = actor.decrypt(id2).await.unwrap();
assert_eq!(*d1.read().unwrap(), plaintext);
assert_eq!(*d2.read().unwrap(), plaintext);
}
#[tokio::test]
#[test_log::test]
// 32 concurrent writers must never be handed the same nonce.
// Fix: removed the stray trailing underscore from the test name.
async fn concurrent_create_new_no_duplicate_nonces() {
    let db = db::create_test_pool().await;
    let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);
    let writes = write_concurrently(actor, "nonce-unique", 32).await;
    assert_eq!(writes.len(), 32);
    let mut conn = db.get().await.unwrap();
    let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
        .select(models::AeadEncrypted::as_select())
        .load(&mut conn)
        .await
        .unwrap();
    assert_eq!(rows.len(), 32);
    let nonces: Vec<&Vec<u8>> = rows.iter().map(|r| &r.current_nonce).collect();
    let unique: HashSet<&Vec<u8>> = nonces.iter().copied().collect();
    assert_eq!(nonces.len(), unique.len(), "all nonces must be unique");
}
#[tokio::test]
#[test_log::test]
// After concurrent writes, root_key_history must hold the maximum nonce ever
// issued - it may never regress to an older value.
async fn concurrent_create_new_root_nonce_never_moves_backward() {
let db = db::create_test_pool().await;
let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);
write_concurrently(actor, "root-max", 24).await;
let mut conn = db.get().await.unwrap();
let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
.select(models::AeadEncrypted::as_select())
.load(&mut conn)
.await
.unwrap();
// Big-endian nonce bytes compare correctly with lexicographic Vec ordering.
let max_nonce = rows
.iter()
.map(|r| r.current_nonce.clone())
.max()
.expect("at least one row");
let root_row: models::RootKeyHistory = schema::root_key_history::table
.select(models::RootKeyHistory::as_select())
.first(&mut conn)
.await
.unwrap();
assert_eq!(root_row.data_encryption_nonce, max_nonce);
}
#[tokio::test]
#[test_log::test]
// Calling get_new_nonce directly (bypassing create_new) must still keep the
// counter strictly monotonic for later writes.
async fn nonce_monotonic_even_when_nonce_allocation_interleaves() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let root_key_history_id = match actor.state {
State::Unsealed {
root_key_history_id,
..
} => root_key_history_id,
_ => panic!("expected unsealed state"),
};
let n1 = KeyHolder::get_new_nonce(&db, root_key_history_id)
.await
.unwrap();
let n2 = KeyHolder::get_new_nonce(&db, root_key_history_id)
.await
.unwrap();
assert!(n2.to_vec() > n1.to_vec(), "nonce must increase");
let mut conn = db.get().await.unwrap();
let root_row: models::RootKeyHistory = schema::root_key_history::table
.select(models::RootKeyHistory::as_select())
.first(&mut conn)
.await
.unwrap();
assert_eq!(root_row.data_encryption_nonce, n2.to_vec());
// A real write after the manual allocations must advance past them.
let id = actor
.create_new(MemSafe::new(b"post-interleave".to_vec()).unwrap())
.await
.unwrap();
let row: models::AeadEncrypted = schema::aead_encrypted::table
.filter(schema::aead_encrypted::id.eq(id))
.select(models::AeadEncrypted::as_select())
.first(&mut conn)
.await
.unwrap();
assert!(
row.current_nonce > n2.to_vec(),
"next write must advance nonce"
);
}
#[tokio::test]
#[test_log::test]
// Forces the aead_encrypted INSERT to fail via a SQLite trigger and verifies no
// partial row is left behind. Also documents (by assertion) that the nonce
// counter still advances on a failed insert - a gap, not a reuse.
async fn insert_failure_does_not_create_partial_row() {
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let root_key_history_id = match actor.state {
State::Unsealed {
root_key_history_id,
..
} => root_key_history_id,
_ => panic!("expected unsealed state"),
};
let mut conn = db.get().await.unwrap();
let before_count: i64 = schema::aead_encrypted::table
.count()
.get_result(&mut conn)
.await
.unwrap();
let before_root_nonce: Vec<u8> = schema::root_key_history::table
.filter(schema::root_key_history::id.eq(root_key_history_id))
.select(schema::root_key_history::data_encryption_nonce)
.first(&mut conn)
.await
.unwrap();
// The trigger makes every INSERT into aead_encrypted abort.
sql_query(
"CREATE TRIGGER fail_aead_insert BEFORE INSERT ON aead_encrypted BEGIN SELECT RAISE(ABORT, 'forced test failure'); END;",
)
.execute(&mut conn)
.await
.unwrap();
drop(conn);
let err = actor
.create_new(MemSafe::new(b"should fail".to_vec()).unwrap())
.await
.unwrap_err();
assert!(matches!(err, Error::DatabaseTransaction(_)));
let mut conn = db.get().await.unwrap();
sql_query("DROP TRIGGER fail_aead_insert;")
.execute(&mut conn)
.await
.unwrap();
let after_count: i64 = schema::aead_encrypted::table
.count()
.get_result(&mut conn)
.await
.unwrap();
assert_eq!(
before_count, after_count,
"failed insert must not create row"
);
let after_root_nonce: Vec<u8> = schema::root_key_history::table
.filter(schema::root_key_history::id.eq(root_key_history_id))
.select(schema::root_key_history::data_encryption_nonce)
.first(&mut conn)
.await
.unwrap();
assert!(
after_root_nonce > before_root_nonce,
"current behavior allows nonce gap on failed insert"
);
}
#[tokio::test]
#[test_log::test]
// Every entry written under heavy concurrency must decrypt to the exact
// plaintext that was submitted, even through a separately unsealed actor.
async fn decrypt_roundtrip_after_high_concurrency() {
let db = db::create_test_pool().await;
let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);
let writes = write_concurrently(actor, "roundtrip", 40).await;
let expected: HashMap<i32, Vec<u8>> = writes.into_iter().collect();
let mut decryptor = KeyHolder::new(db.clone()).await.unwrap();
decryptor
.try_unseal(MemSafe::new(b"test-seal-key".to_vec()).unwrap())
.await
.unwrap();
for (id, plaintext) in expected {
let mut decrypted = decryptor.decrypt(id).await.unwrap();
assert_eq!(*decrypted.read().unwrap(), plaintext);
}
}
// #[tokio::test]
// #[test_log::test]
// async fn swapping_ciphertext_and_nonce_between_rows_changes_logical_binding() {
// let db = db::create_test_pool().await;
// let mut actor = bootstrapped_actor(&db).await;
// let plaintext1 = b"entry-one";
// let plaintext2 = b"entry-two";
// let id1 = actor
// .create_new(MemSafe::new(plaintext1.to_vec()).unwrap())
// .await
// .unwrap();
// let id2 = actor
// .create_new(MemSafe::new(plaintext2.to_vec()).unwrap())
// .await
// .unwrap();
// let mut conn = db.get().await.unwrap();
// let row1: models::AeadEncrypted = schema::aead_encrypted::table
// .filter(schema::aead_encrypted::id.eq(id1))
// .select(models::AeadEncrypted::as_select())
// .first(&mut conn)
// .await
// .unwrap();
// let row2: models::AeadEncrypted = schema::aead_encrypted::table
// .filter(schema::aead_encrypted::id.eq(id2))
// .select(models::AeadEncrypted::as_select())
// .first(&mut conn)
// .await
// .unwrap();
// update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id1)))
// .set((
// schema::aead_encrypted::ciphertext.eq(row2.ciphertext.clone()),
// schema::aead_encrypted::current_nonce.eq(row2.current_nonce.clone()),
// ))
// .execute(&mut conn)
// .await
// .unwrap();
// update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id2)))
// .set((
// schema::aead_encrypted::ciphertext.eq(row1.ciphertext.clone()),
// schema::aead_encrypted::current_nonce.eq(row1.current_nonce.clone()),
// ))
// .execute(&mut conn)
// .await
// .unwrap();
// let mut d1 = actor.decrypt(id1).await.unwrap();
// let mut d2 = actor.decrypt(id2).await.unwrap();
// assert_eq!(*d1.read().unwrap(), plaintext2);
// assert_eq!(*d2.read().unwrap(), plaintext1);
// }
#[tokio::test]
#[test_log::test]
// Malformed nonce bytes anywhere in the database must fail closed with
// Error::BrokenDatabase rather than being silently padded or truncated.
async fn broken_db_nonce_format_fails_closed() {
// malformed root_key_history nonce must fail create_new
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let root_key_history_id = match actor.state {
State::Unsealed {
root_key_history_id,
..
} => root_key_history_id,
_ => panic!("expected unsealed state"),
};
let mut conn = db.get().await.unwrap();
update(
schema::root_key_history::table
.filter(schema::root_key_history::id.eq(root_key_history_id)),
)
.set(schema::root_key_history::data_encryption_nonce.eq(vec![1, 2, 3]))
.execute(&mut conn)
.await
.unwrap();
drop(conn);
let err = actor
.create_new(MemSafe::new(b"must fail".to_vec()).unwrap())
.await
.unwrap_err();
assert!(matches!(err, Error::BrokenDatabase));
// malformed per-row nonce must fail decrypt
let db = db::create_test_pool().await;
let mut actor = bootstrapped_actor(&db).await;
let id = actor
.create_new(MemSafe::new(b"decrypt target".to_vec()).unwrap())
.await
.unwrap();
let mut conn = db.get().await.unwrap();
update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id)))
.set(schema::aead_encrypted::current_nonce.eq(vec![7, 8]))
.execute(&mut conn)
.await
.unwrap();
drop(conn);
let err = actor.decrypt(id).await.unwrap_err();
assert!(matches!(err, Error::BrokenDatabase));
}
}

View File

@@ -1,242 +0,0 @@
use std::ops::Deref as _;
use argon2::{Algorithm, Argon2, password_hash::Salt as ArgonSalt};
use chacha20poly1305::{
AeadInPlace, Key, KeyInit as _, XChaCha20Poly1305, XNonce,
aead::{AeadMut, Error, Payload},
};
use memsafe::MemSafe;
use rand::{
Rng as _, SeedableRng,
rngs::{StdRng, SysRng},
};
// Associated-data tag used when sealing/unsealing the root key itself.
pub const ROOT_KEY_TAG: &[u8] = "arbiter/seal/v1".as_bytes();
// Associated-data tag used for payloads encrypted under the root key.
pub const TAG: &[u8] = "arbiter/private-key/v1".as_bytes();
// XChaCha20-Poly1305 uses 192-bit (24-byte) nonces.
pub const NONCE_LENGTH: usize = 24;
// A 24-byte counter nonce, incremented big-endian (last byte is least significant).
#[derive(Default)]
pub struct Nonce([u8; NONCE_LENGTH]);
impl Nonce {
// Big-endian increment with carry propagation from the last byte.
// NOTE(review): an all-0xFF nonce silently wraps to all zeroes; at 192 bits
// that is unreachable in practice, but callers must never seed it near the max.
pub fn increment(&mut self) {
for i in (0..self.0.len()).rev() {
if self.0[i] == 0xFF {
self.0[i] = 0;
} else {
self.0[i] += 1;
break;
}
}
}
// Copies the nonce bytes into an owned Vec (e.g. for database storage).
pub fn to_vec(&self) -> Vec<u8> {
self.0.to_vec()
}
}
impl<'a> TryFrom<&'a [u8]> for Nonce {
    type Error = ();

    // Accepts exactly NONCE_LENGTH bytes; any other length is rejected with ().
    fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> {
        match <[u8; NONCE_LENGTH]>::try_from(value) {
            Ok(bytes) => Ok(Self(bytes)),
            Err(_) => Err(()),
        }
    }
}
// A symmetric key held in MemSafe-protected memory.
pub struct KeyCell(pub(super) MemSafe<Key>);
impl From<MemSafe<Key>> for KeyCell {
fn from(value: MemSafe<Key>) -> Self {
Self(value)
}
}
impl TryFrom<MemSafe<Vec<u8>>> for KeyCell {
type Error = ();
// Fails with () unless the buffer is exactly the key size; on success the
// bytes are copied into a fresh MemSafe-backed Key.
fn try_from(mut value: MemSafe<Vec<u8>>) -> Result<Self, Self::Error> {
let value = value.read().unwrap();
if value.len() != size_of::<Key>() {
return Err(());
}
let mut cell = MemSafe::new(Key::default()).unwrap();
{
// Scope the write guard so `cell` can be moved out afterwards.
let mut cell_write = cell.write().unwrap();
let cell_slice: &mut [u8] = cell_write.as_mut();
cell_slice.copy_from_slice(&value);
}
Ok(Self(cell))
}
}
impl KeyCell {
// Fills a fresh MemSafe-backed key with bytes from a CSPRNG seeded by the
// system RNG.
pub fn new_secure_random() -> Self {
let mut key = MemSafe::new(Key::default()).unwrap();
{
// Scope the write guard so `key` can be moved into the KeyCell.
let mut key_buffer = key.write().unwrap();
let key_buffer: &mut [u8] = key_buffer.as_mut();
let mut rng = StdRng::try_from_rng(&mut SysRng).unwrap();
rng.fill_bytes(key_buffer);
}
key.into()
}
// Consumes the cell, handing back the protected key material.
pub fn into_inner(self) -> MemSafe<Key> {
self.0
}
// Encrypts `buffer` in place with XChaCha20-Poly1305 under this key,
// binding `associated_data` into the authentication tag.
pub fn encrypt_in_place(
&mut self,
nonce: &Nonce,
associated_data: &[u8],
mut buffer: impl AsMut<Vec<u8>>,
) -> Result<(), Error> {
let key_reader = self.0.read().unwrap();
let key_ref = key_reader.deref();
let cipher = XChaCha20Poly1305::new(key_ref);
let nonce = XNonce::from_slice(nonce.0.as_ref());
let buffer = buffer.as_mut();
cipher.encrypt_in_place(nonce, associated_data, buffer)
}
// Decrypts and authenticates `buffer` in place; fails if the nonce, key, or
// associated data do not match what was used at encryption time.
pub fn decrypt_in_place(
&mut self,
nonce: &Nonce,
associated_data: &[u8],
buffer: &mut MemSafe<Vec<u8>>,
) -> Result<(), Error> {
let key_reader = self.0.read().unwrap();
let key_ref = key_reader.deref();
let cipher = XChaCha20Poly1305::new(key_ref);
let nonce = XNonce::from_slice(nonce.0.as_ref());
let mut buffer = buffer.write().unwrap();
let buffer: &mut Vec<u8> = buffer.as_mut();
cipher.decrypt_in_place(nonce, associated_data, buffer)
}
// Non-in-place variant: returns a freshly allocated ciphertext.
pub fn encrypt(
&mut self,
nonce: &Nonce,
associated_data: &[u8],
plaintext: impl AsRef<[u8]>,
) -> Result<Vec<u8>, Error> {
let key_reader = self.0.read().unwrap();
let key_ref = key_reader.deref();
let mut cipher = XChaCha20Poly1305::new(key_ref);
let nonce = XNonce::from_slice(nonce.0.as_ref());
let ciphertext = cipher.encrypt(
&nonce,
Payload {
msg: plaintext.as_ref(),
aad: associated_data,
},
)?;
Ok(ciphertext)
}
}
// Salt sized to Argon2's recommended length.
pub type Salt = [u8; ArgonSalt::RECOMMENDED_LENGTH];
// Generates a fresh random salt from a CSPRNG seeded by the system RNG.
pub(super) fn generate_salt() -> Salt {
let mut salt = Salt::default();
let mut rng = StdRng::try_from_rng(&mut SysRng).unwrap();
rng.fill_bytes(&mut salt);
salt
}
/// User password might be of different length, have not enough entropy, etc...
/// Derive a fixed-length key from the password using Argon2id, which is designed for password hashing and key derivation.
///
/// Parameters: 262_144 KiB memory cost, 3 passes, 4 lanes — deliberately
/// expensive to slow down brute-force attempts. Deterministic for a given
/// (password, salt) pair.
pub(super) fn derive_seal_key(mut password: MemSafe<Vec<u8>>, salt: &Salt) -> KeyCell {
let params = argon2::Params::new(262_144, 3, 4, None).unwrap();
let hasher = Argon2::new(Algorithm::Argon2id, argon2::Version::V0x13, params);
let mut key = MemSafe::new(Key::default()).unwrap();
{
// Read the password and write the derived key inside guarded scopes so
// both MemSafe guards are dropped before `key` is moved.
let password_source = password.read().unwrap();
let mut key_buffer = key.write().unwrap();
let key_buffer: &mut [u8] = key_buffer.as_mut();
hasher
.hash_password_into(password_source.deref(), salt, key_buffer)
.unwrap();
}
key.into()
}
#[cfg(test)]
mod tests {
use super::*;
use memsafe::MemSafe;
#[test]
// Same password and same salt must always yield the same derived key.
pub fn derive_seal_key_deterministic() {
static PASSWORD: &[u8] = b"password";
let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
let password2 = MemSafe::new(PASSWORD.to_vec()).unwrap();
let salt = generate_salt();
let mut key1 = derive_seal_key(password, &salt);
let mut key2 = derive_seal_key(password2, &salt);
let key1_reader = key1.0.read().unwrap();
let key2_reader = key2.0.read().unwrap();
assert_eq!(key1_reader.deref(), key2_reader.deref());
}
#[test]
// Derivation must produce a non-trivial (not all-zero) key.
pub fn successful_derive() {
static PASSWORD: &[u8] = b"password";
let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
let salt = generate_salt();
let mut key = derive_seal_key(password, &salt);
let key_reader = key.0.read().unwrap();
let key_ref = key_reader.deref();
assert_ne!(key_ref.as_slice(), &[0u8; 32][..]);
}
#[test]
// Round-trip: in-place encryption must change the bytes and in-place
// decryption with the same nonce/AAD must restore the original plaintext.
pub fn encrypt_decrypt() {
static PASSWORD: &[u8] = b"password";
let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
let salt = generate_salt();
let mut key = derive_seal_key(password, &salt);
let nonce = Nonce(*b"unique nonce 123 1231233"); // 24 bytes for XChaCha20Poly1305
let associated_data = b"associated data";
let mut buffer = b"secret data".to_vec();
key.encrypt_in_place(&nonce, associated_data, &mut buffer)
.unwrap();
assert_ne!(buffer, b"secret data");
let mut buffer = MemSafe::new(buffer).unwrap();
key.decrypt_in_place(&nonce, associated_data, &mut buffer)
.unwrap();
let buffer = buffer.read().unwrap();
assert_eq!(*buffer, b"secret data");
}
#[test]
// Covers simple increment, multi-byte carry, and full wrap-around (the cases
// the old "We should fuzz this" note asked for).
pub fn test_nonce_increment() {
    // 0 -> 1 in the last (least significant, big-endian) byte.
    let mut nonce = Nonce([0u8; NONCE_LENGTH]);
    nonce.increment();
    let mut expected = [0u8; NONCE_LENGTH];
    expected[NONCE_LENGTH - 1] = 1;
    assert_eq!(nonce.0, expected);

    // Carry: ...0x00 0xFF -> ...0x01 0x00.
    let mut start = [0u8; NONCE_LENGTH];
    start[NONCE_LENGTH - 1] = 0xFF;
    let mut nonce = Nonce(start);
    nonce.increment();
    let mut expected = [0u8; NONCE_LENGTH];
    expected[NONCE_LENGTH - 2] = 1;
    assert_eq!(nonce.0, expected);

    // All-0xFF wraps around to all zeroes (documented overflow behavior).
    let mut nonce = Nonce([0xFF; NONCE_LENGTH]);
    nonce.increment();
    assert_eq!(nonce.0, [0u8; NONCE_LENGTH]);
}
}

View File

@@ -1,58 +1,102 @@
use std::{ use std::sync::Arc;
ops::DerefMut,
sync::Mutex,
};
use arbiter_proto::proto::{ use arbiter_proto::{
UserAgentResponse, proto::{
auth::{ UserAgentRequest, UserAgentResponse,
self, AuthChallengeRequest, AuthOk, ServerMessage as AuthServerMessage, auth::{
server_message::Payload as ServerAuthPayload, self, AuthChallengeRequest, ClientMessage, ServerMessage as AuthServerMessage,
client_message::Payload as ClientAuthPayload,
server_message::Payload as ServerAuthPayload,
},
user_agent_request::Payload as UserAgentRequestPayload,
user_agent_response::Payload as UserAgentResponsePayload,
}, },
unseal::{UnsealEncryptedKey, UnsealResult, UnsealStart, UnsealStartResponse}, transport::Bi,
user_agent_response::Payload as UserAgentResponsePayload,
};
use chacha20poly1305::{
AeadInPlace, XChaCha20Poly1305, XNonce,
aead::KeyInit,
}; };
use diesel::{ExpressionMethods as _, OptionalExtension as _, QueryDsl, dsl::update}; use diesel::{ExpressionMethods as _, OptionalExtension as _, QueryDsl, dsl::update};
use diesel_async::{AsyncConnection, RunQueryDsl}; use diesel_async::{AsyncConnection, RunQueryDsl};
use ed25519_dalek::VerifyingKey; use ed25519_dalek::VerifyingKey;
use kameo::{Actor, actor::ActorRef, messages}; use futures::StreamExt;
use memsafe::MemSafe; use kameo::{
Actor,
actor::{ActorRef, Spawn},
error::SendError,
messages,
prelude::Context,
};
use tokio::sync::mpsc;
use tokio::sync::mpsc::Sender; use tokio::sync::mpsc::Sender;
use tonic::Status; use tonic::{Status, transport::Server};
use tracing::{error, info}; use tracing::{debug, error, info};
use x25519_dalek::{EphemeralSecret, PublicKey};
use crate::{ use crate::{
ServerContext, ServerContext,
actors::{ actors::user_agent::auth::AuthChallenge,
bootstrap::{Bootstrapper, ConsumeToken}, context::bootstrap::{BootstrapActor, ConsumeToken},
user_agent::state::{
AuthRequestContext, ChallengeContext, DummyContext, UnsealContext, UserAgentEvents,
UserAgentStateMachine, UserAgentStates,
},
},
db::{self, schema}, db::{self, schema},
errors::GrpcStatusExt, errors::GrpcStatusExt,
}; };
mod state; #[derive(Debug)]
#[cfg(test)] pub struct ChallengeContext {
mod tests; challenge: AuthChallenge,
key: VerifyingKey,
}
mod transport; // Request context with deserialized public key for state machine.
pub(crate) use transport::handle_user_agent; // This intermediate struct is needed because the state machine branches depending on presence of bootstrap token,
// but we want to have the deserialized key in both branches.
#[derive(Clone, Debug)]
pub struct AuthRequestContext {
pubkey: VerifyingKey,
bootstrap_token: Option<String>,
}
smlang::statemachine!(
name: UserAgent,
derive_states: [Debug],
custom_error: false,
transitions: {
*Init + AuthRequest(AuthRequestContext) / auth_request_context = ReceivedAuthRequest(AuthRequestContext),
ReceivedAuthRequest(AuthRequestContext) + ReceivedBootstrapToken = Authenticated,
ReceivedAuthRequest(AuthRequestContext) + SentChallenge(ChallengeContext) / move_challenge = WaitingForChallengeSolution(ChallengeContext),
WaitingForChallengeSolution(ChallengeContext) + ReceivedGoodSolution = Authenticated,
WaitingForChallengeSolution(ChallengeContext) + ReceivedBadSolution = AuthError, // block further transitions, but connection should close anyway
}
);
pub struct DummyContext;
impl UserAgentStateMachineContext for DummyContext {
#[allow(missing_docs)]
#[allow(clippy::unused_unit)]
fn move_challenge(
&mut self,
state_data: &AuthRequestContext,
event_data: ChallengeContext,
) -> Result<ChallengeContext, ()> {
Ok(event_data)
}
#[allow(missing_docs)]
#[allow(clippy::unused_unit)]
fn auth_request_context(
&mut self,
event_data: AuthRequestContext,
) -> Result<AuthRequestContext, ()> {
Ok(event_data)
}
}
#[derive(Actor)] #[derive(Actor)]
pub struct UserAgentActor { pub struct UserAgentActor {
db: db::DatabasePool, db: db::DatabasePool,
bootstapper: ActorRef<Bootstrapper>, bootstapper: ActorRef<BootstrapActor>,
state: UserAgentStateMachine<DummyContext>, state: UserAgentStateMachine<DummyContext>,
// will be used in future tx: Sender<Result<UserAgentResponse, Status>>,
_tx: Sender<Result<UserAgentResponse, Status>>, context: ServerContext,
ephemeral_key: Option<crate::context::unseal::EphemeralKeyPair>,
} }
impl UserAgentActor { impl UserAgentActor {
@@ -64,21 +108,25 @@ impl UserAgentActor {
db: context.db.clone(), db: context.db.clone(),
bootstapper: context.bootstrapper.clone(), bootstapper: context.bootstrapper.clone(),
state: UserAgentStateMachine::new(DummyContext), state: UserAgentStateMachine::new(DummyContext),
_tx: tx, tx,
context,
ephemeral_key: None,
} }
} }
#[cfg(test)]
pub(crate) fn new_manual( pub(crate) fn new_manual(
db: db::DatabasePool, db: db::DatabasePool,
bootstapper: ActorRef<Bootstrapper>, bootstapper: ActorRef<BootstrapActor>,
context: ServerContext,
tx: Sender<Result<UserAgentResponse, Status>>, tx: Sender<Result<UserAgentResponse, Status>>,
) -> Self { ) -> Self {
Self { Self {
db, db,
bootstapper, bootstapper,
state: UserAgentStateMachine::new(DummyContext), state: UserAgentStateMachine::new(DummyContext),
_tx: tx, tx,
context,
ephemeral_key: None,
} }
} }
@@ -124,7 +172,7 @@ impl UserAgentActor {
self.transition(UserAgentEvents::ReceivedBootstrapToken)?; self.transition(UserAgentEvents::ReceivedBootstrapToken)?;
Ok(auth_response(ServerAuthPayload::AuthOk(AuthOk {}))) Ok(auth_response(ServerAuthPayload::AuthOk(auth::AuthOk {})))
} }
async fn auth_with_challenge(&mut self, pubkey: VerifyingKey, pubkey_bytes: Vec<u8>) -> Output { async fn auth_with_challenge(&mut self, pubkey: VerifyingKey, pubkey_bytes: Vec<u8>) -> Output {
@@ -164,7 +212,7 @@ impl UserAgentActor {
let challenge = auth::AuthChallenge { let challenge = auth::AuthChallenge {
pubkey: pubkey_bytes, pubkey: pubkey_bytes,
nonce: nonce, nonce,
}; };
self.transition(UserAgentEvents::SentChallenge(ChallengeContext { self.transition(UserAgentEvents::SentChallenge(ChallengeContext {
@@ -218,93 +266,18 @@ fn auth_response(payload: ServerAuthPayload) -> UserAgentResponse {
} }
} }
fn unseal_response(payload: UserAgentResponsePayload) -> UserAgentResponse {
UserAgentResponse {
payload: Some(payload),
}
}
#[messages] #[messages]
impl UserAgentActor { impl UserAgentActor {
#[message] #[message(ctx)]
pub async fn handle_unseal_request(&mut self, req: UnsealStart) -> Output { pub async fn handle_auth_challenge_request(
let secret = EphemeralSecret::random(); &mut self,
let public_key = PublicKey::from(&secret); req: AuthChallengeRequest,
ctx: &mut Context<Self, Output>,
let client_pubkey_bytes: [u8; 32] = req ) -> Output {
.client_pubkey
.try_into()
.map_err(|_| Status::invalid_argument("client_pubkey must be 32 bytes"))?;
let client_public_key = PublicKey::from(client_pubkey_bytes);
self.transition(UserAgentEvents::UnsealRequest(UnsealContext {
server_public_key: public_key,
secret: Mutex::new(Some(secret)),
client_public_key,
}))?;
Ok(unseal_response(
UserAgentResponsePayload::UnsealStartResponse(UnsealStartResponse {
server_pubkey: public_key.as_bytes().to_vec(),
}),
))
}
#[message]
pub async fn handle_unseal_encrypted_key(&mut self, req: UnsealEncryptedKey) -> Output {
let UserAgentStates::WaitingForUnsealKey(unseal_context) = self.state.state() else {
error!("Received unseal encrypted key in invalid state");
return Err(Status::failed_precondition(
"Invalid state for unseal encrypted key",
));
};
let ephemeral_secret = {
let mut secret_lock = unseal_context.secret.lock().unwrap();
let secret = secret_lock.take();
match secret {
Some(secret) => secret,
None => {
drop(secret_lock);
error!("Ephemeral secret already taken");
self.transition(UserAgentEvents::ReceivedInvalidKey)?;
return Ok(unseal_response(UserAgentResponsePayload::UnsealResult(
UnsealResult::InvalidKey.into(),
)));
}
}
};
let nonce = XNonce::from_slice(&req.nonce);
let shared_secret = ephemeral_secret.diffie_hellman(&unseal_context.client_public_key);
let cipher = XChaCha20Poly1305::new(shared_secret.as_bytes().into());
let mut root_key_buffer = MemSafe::new(req.ciphertext.clone()).unwrap();
let mut write_handle = root_key_buffer.write().unwrap();
let write_handle = write_handle.deref_mut();
let decryption_result = cipher
.decrypt_in_place(nonce, &req.associated_data, write_handle);
match decryption_result {
Ok(_) => todo!("Send key to the keyguarding"),
Err(err) => {
error!(?err, "Failed to decrypt unseal key");
self.transition(UserAgentEvents::ReceivedInvalidKey)?;
return Ok(unseal_response(UserAgentResponsePayload::UnsealResult(
UnsealResult::InvalidKey.into(),
)));
},
}
}
#[message]
pub async fn handle_auth_challenge_request(&mut self, req: AuthChallengeRequest) -> Output {
let pubkey = req.pubkey.as_array().ok_or(Status::invalid_argument( let pubkey = req.pubkey.as_array().ok_or(Status::invalid_argument(
"Expected pubkey to have specific length", "Expected pubkey to have specific length",
))?; ))?;
let pubkey = VerifyingKey::from_bytes(pubkey).map_err(|_err| { let pubkey = VerifyingKey::from_bytes(pubkey).map_err(|err| {
error!(?pubkey, "Failed to convert to VerifyingKey"); error!(?pubkey, "Failed to convert to VerifyingKey");
Status::invalid_argument("Failed to convert pubkey to VerifyingKey") Status::invalid_argument("Failed to convert pubkey to VerifyingKey")
})?; })?;
@@ -320,10 +293,11 @@ impl UserAgentActor {
} }
} }
#[message] #[message(ctx)]
pub async fn handle_auth_challenge_solution( pub async fn handle_auth_challenge_solution(
&mut self, &mut self,
solution: auth::AuthChallengeSolution, solution: auth::AuthChallengeSolution,
ctx: &mut Context<Self, Output>,
) -> Output { ) -> Output {
let (valid, challenge_context) = self.verify_challenge_solution(&solution)?; let (valid, challenge_context) = self.verify_challenge_solution(&solution)?;
@@ -333,11 +307,192 @@ impl UserAgentActor {
"Client provided valid solution to authentication challenge" "Client provided valid solution to authentication challenge"
); );
self.transition(UserAgentEvents::ReceivedGoodSolution)?; self.transition(UserAgentEvents::ReceivedGoodSolution)?;
Ok(auth_response(ServerAuthPayload::AuthOk(AuthOk {}))) Ok(auth_response(ServerAuthPayload::AuthOk(auth::AuthOk {})))
} else { } else {
error!("Client provided invalid solution to authentication challenge"); error!("Client provided invalid solution to authentication challenge");
self.transition(UserAgentEvents::ReceivedBadSolution)?; self.transition(UserAgentEvents::ReceivedBadSolution)?;
Err(Status::unauthenticated("Invalid challenge solution")) Err(Status::unauthenticated("Invalid challenge solution"))
} }
} }
/// Handles the two-phase vault-unseal exchange with a client.
///
/// Phase 1 (`EphemeralKeyRequest`): generate a fresh X25519 keypair, keep it
/// in `self.ephemeral_key`, and return the public half plus its expiry so the
/// client can seal the vault password to us.
/// Phase 2 (`SealedPassword`): consume the stored keypair, run ECDH against
/// the client's public key, AEAD-decrypt the password and pass it to
/// `self.context.unseal`. The outcome is reported as a structured
/// `UnsealResult`; protocol violations become gRPC `Status` errors.
#[message(ctx)]
pub async fn handle_unseal_request(
    &mut self,
    request: arbiter_proto::proto::UnsealRequest,
    // NOTE(review): `ctx` is not used in this handler — confirm whether the
    // `#[message(ctx)]` form is still required here.
    ctx: &mut Context<Self, Output>,
) -> Output {
    use arbiter_proto::proto::{
        EphemeralKeyResponse, SealedPassword, UnsealResponse, UnsealResult,
        unseal_request::Payload as ReqPayload,
        unseal_response::Payload as RespPayload,
    };
    match request.payload {
        Some(ReqPayload::EphemeralKeyRequest(_)) => {
            // Generate new ephemeral keypair for this unseal attempt.
            let keypair = crate::context::unseal::EphemeralKeyPair::generate();
            let expires_at = keypair.expires_at() as i64;
            let public_bytes = keypair.public_bytes();
            // Store for later use; silently replaces any pending keypair.
            self.ephemeral_key = Some(keypair);
            info!("Generated ephemeral X25519 keypair for unseal, expires at {}", expires_at);
            Ok(UserAgentResponse {
                payload: Some(UserAgentResponsePayload::UnsealResponse(UnsealResponse {
                    payload: Some(RespPayload::EphemeralKeyResponse(EphemeralKeyResponse {
                        server_pubkey: public_bytes,
                        expires_at,
                    })),
                })),
            })
        }
        Some(ReqPayload::SealedPassword(sealed)) => {
            // Get and consume ephemeral key — `take()` makes it one-shot.
            let keypair = self
                .ephemeral_key
                .take()
                .ok_or_else(|| Status::failed_precondition("No ephemeral key generated"))?;
            // Check expiration before doing any cryptography.
            if keypair.is_expired() {
                error!("Ephemeral key expired before sealed password was received");
                return Err(Status::deadline_exceeded("Ephemeral key expired"));
            }
            // Perform ECDH to derive the shared AEAD key.
            let shared_secret = keypair
                .perform_dh(&sealed.client_pubkey)
                .map_err(|e| Status::invalid_argument(format!("Invalid client pubkey: {}", e)))?;
            // Decrypt password; the AEAD nonce must be exactly 12 bytes.
            let nonce: [u8; 12] = sealed
                .nonce
                .as_slice()
                .try_into()
                .map_err(|_| Status::invalid_argument("Nonce must be 12 bytes"))?;
            let password_bytes = crate::crypto::aead::decrypt(
                &sealed.encrypted_password,
                &shared_secret,
                &nonce,
            )
            .map_err(|e| {
                error!("Failed to decrypt password: {}", e);
                Status::internal(format!("Decryption failed: {}", e))
            })?;
            let password = String::from_utf8(password_bytes).map_err(|_| {
                error!("Password is not valid UTF-8");
                Status::invalid_argument("Password must be UTF-8")
            })?;
            // Call unseal on context; a failed unseal is reported to the
            // client as a structured result, not as a transport error.
            info!("Attempting to unseal vault with decrypted password");
            let result = self.context.unseal(&password).await;
            match result {
                Ok(()) => {
                    info!("Vault unsealed successfully");
                    Ok(UserAgentResponse {
                        payload: Some(UserAgentResponsePayload::UnsealResponse(
                            UnsealResponse {
                                payload: Some(RespPayload::UnsealResult(UnsealResult {
                                    success: true,
                                    error_message: None,
                                })),
                            },
                        )),
                    })
                }
                Err(e) => {
                    error!("Unseal failed: {}", e);
                    Ok(UserAgentResponse {
                        payload: Some(UserAgentResponsePayload::UnsealResponse(
                            UnsealResponse {
                                payload: Some(RespPayload::UnsealResult(UnsealResult {
                                    success: false,
                                    error_message: Some(e.to_string()),
                                })),
                            },
                        )),
                    })
                }
            }
        }
        None => {
            error!("Received empty unseal request");
            Err(Status::invalid_argument("Empty unseal request"))
        }
    }
}
} }
#[cfg(test)]
mod tests {
    use arbiter_proto::proto::{
        UserAgentResponse,
        auth::{AuthChallengeRequest, AuthOk},
        user_agent_response::Payload as UserAgentResponsePayload,
    };
    use kameo::actor::Spawn;

    use super::UserAgentActor;
    use crate::{
        actors::user_agent::HandleAuthChallengeRequest, context::bootstrap::BootstrapActor, db,
    };

    /// A brand-new key presenting a valid bootstrap token must be accepted
    /// and answered with `AuthOk`, even with no pubkeys installed.
    #[tokio::test]
    #[test_log::test]
    pub async fn test_bootstrap_token_auth() {
        let pool = db::create_test_pool().await;
        // Deliberately no user_agent pubkeys installed: the bootstrap token
        // is the only credential available.
        let bootstrap_actor = BootstrapActor::new(&pool).await.unwrap(); // creates the bootstrap token
        let bootstrap_token = bootstrap_actor.get_token().unwrap();
        let bootstrap_ref = BootstrapActor::spawn(bootstrap_actor);
        let server_context = crate::ServerContext::new(pool.clone()).await.unwrap();
        // Dummy response channel; nothing is sent over it in this test.
        let (response_tx, _response_rx) = tokio::sync::mpsc::channel(1);
        let actor = UserAgentActor::new_manual(
            pool.clone(),
            bootstrap_ref,
            server_context,
            response_tx,
        );
        let actor_ref = UserAgentActor::spawn(actor);

        // Simulate a client authenticating with a fresh key plus the token.
        let signing_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
        let request = AuthChallengeRequest {
            pubkey: signing_key.verifying_key().to_bytes().to_vec(),
            bootstrap_token: Some(bootstrap_token),
        };
        let response = actor_ref
            .ask(HandleAuthChallengeRequest { req: request })
            .await
            .expect("Shouldn't fail to send message");

        // The server must answer with AuthOk.
        let expected = UserAgentResponse {
            payload: Some(UserAgentResponsePayload::AuthMessage(
                arbiter_proto::proto::auth::ServerMessage {
                    payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
                        AuthOk {},
                    )),
                },
            )),
        };
        assert_eq!(response, expected);
    }
}
// Streaming transport layer feeding incoming user-agent messages into the actor.
mod transport;
pub(crate) use transport::handle_user_agent;

View File

@@ -1,76 +0,0 @@
use std::sync::Mutex;
use arbiter_proto::proto::auth::AuthChallenge;
use ed25519_dalek::VerifyingKey;
use x25519_dalek::{EphemeralSecret, PublicKey};
/// Context for state machine with validated key and sent challenge.
/// The challenge is serialized to bytes by a shared helper and the client's
/// signature over those bytes is verified against `key`.
#[derive(Clone, Debug)]
pub struct ChallengeContext {
    /// The challenge previously sent to the client.
    pub challenge: AuthChallenge,
    /// Verifying key the solution's signature must match.
    pub key: VerifyingKey,
}
/// Request context with a deserialized public key for the state machine.
/// This intermediate struct is needed because the state machine branches on
/// the presence of a bootstrap token, but both branches need the
/// deserialized key.
#[derive(Clone, Debug)]
pub struct AuthRequestContext {
    /// Client public key, already parsed from the wire bytes.
    pub pubkey: VerifyingKey,
    /// Optional one-time bootstrap token presented by the client.
    pub bootstrap_token: Option<String>,
}
/// State carried while waiting for the client's encrypted unseal key.
pub struct UnsealContext {
    /// Our ephemeral X25519 public key (sent to the client).
    pub server_public_key: PublicKey,
    /// The client's X25519 public key.
    pub client_public_key: PublicKey,
    /// One-shot ephemeral secret: `EphemeralSecret` is consumed by DH, hence
    /// the `Option` (taken exactly once) behind a `Mutex`.
    pub secret: Mutex<Option<EphemeralSecret>>,
}
// Per-connection authentication/unseal state machine for a user agent.
smlang::statemachine!(
    name: UserAgent,
    custom_error: false,
    transitions: {
        // Auth: either a bootstrap token short-circuits to Idle, or a
        // challenge is issued and its solution verified.
        *Init + AuthRequest(AuthRequestContext) / auth_request_context = ReceivedAuthRequest(AuthRequestContext),
        ReceivedAuthRequest(AuthRequestContext) + ReceivedBootstrapToken = Idle,
        ReceivedAuthRequest(AuthRequestContext) + SentChallenge(ChallengeContext) / move_challenge = WaitingForChallengeSolution(ChallengeContext),
        WaitingForChallengeSolution(ChallengeContext) + ReceivedGoodSolution = Idle,
        WaitingForChallengeSolution(ChallengeContext) + ReceivedBadSolution = AuthError, // block further transitions, but connection should close anyway
        // Unseal: after the ephemeral keypair is issued, the client's key is
        // either accepted (Unsealed) or rejected (back to Idle).
        Idle + UnsealRequest(UnsealContext) / generate_temp_keypair = WaitingForUnsealKey(UnsealContext),
        WaitingForUnsealKey(UnsealContext) + ReceivedValidKey = Unsealed,
        WaitingForUnsealKey(UnsealContext) + ReceivedInvalidKey = Idle,
    }
);
/// No-op state machine context: every action simply passes its event data
/// through unchanged.
pub struct DummyContext;

impl UserAgentStateMachineContext for DummyContext {
    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn move_challenge(
        &mut self,
        _state_data: &AuthRequestContext,
        event_data: ChallengeContext,
    ) -> Result<ChallengeContext, ()> {
        // Carry the challenge into WaitingForChallengeSolution.
        Ok(event_data)
    }

    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn auth_request_context(
        &mut self,
        event_data: AuthRequestContext,
    ) -> Result<AuthRequestContext, ()> {
        // Store the parsed auth request as state data.
        Ok(event_data)
    }

    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn generate_temp_keypair(&mut self, event_data: UnsealContext) -> Result<UnsealContext, ()> {
        // The actual keypair generation happens in the actor; just move the context.
        Ok(event_data)
    }
}

View File

@@ -1,199 +0,0 @@
use arbiter_proto::proto::{
UserAgentResponse,
auth::{self, AuthChallengeRequest, AuthOk},
user_agent_response::Payload as UserAgentResponsePayload,
};
use chrono::format;
use diesel::{ExpressionMethods as _, QueryDsl, insert_into};
use diesel_async::RunQueryDsl;
use ed25519_dalek::Signer as _;
use kameo::actor::Spawn;
use crate::{
actors::{
bootstrap::Bootstrapper,
user_agent::{HandleAuthChallengeRequest, HandleAuthChallengeSolution},
},
db::{self, schema},
};
use super::UserAgentActor;
// Happy path: a new key plus a valid bootstrap token yields AuthOk and the
// key is persisted in the database.
#[tokio::test]
#[test_log::test]
pub async fn test_bootstrap_token_auth() {
    let db = db::create_test_pool().await;
    // explicitly not installing any user_agent pubkeys
    let bootstrapper = Bootstrapper::new(&db).await.unwrap(); // this will create bootstrap token
    let token = bootstrapper.get_token().unwrap();
    let bootstrapper_ref = Bootstrapper::spawn(bootstrapper);
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);
    // simulate client sending auth request with bootstrap token
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();
    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: Some(token),
            },
        })
        .await
        .expect("Shouldn't fail to send message");
    // auth succeeded
    assert_eq!(
        result,
        UserAgentResponse {
            payload: Some(UserAgentResponsePayload::AuthMessage(
                arbiter_proto::proto::auth::ServerMessage {
                    payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
                        AuthOk {},
                    )),
                },
            )),
        }
    );
    // key is successfully recorded in database
    let mut conn = db.get().await.unwrap();
    let stored_pubkey: Vec<u8> = schema::useragent_client::table
        .select(schema::useragent_client::public_key)
        .first::<Vec<u8>>(&mut conn)
        .await
        .unwrap();
    assert_eq!(stored_pubkey, new_key.verifying_key().to_bytes().to_vec());
}
}
// An unknown bootstrap token must be rejected with InvalidArgument; no
// AuthOk is issued and the ask() resolves to a handler error.
#[tokio::test]
#[test_log::test]
pub async fn test_bootstrap_invalid_token_auth() {
    let db = db::create_test_pool().await;
    // explicitly not installing any user_agent pubkeys
    let bootstrapper = Bootstrapper::new(&db).await.unwrap(); // this will create bootstrap token
    let bootstrapper_ref = Bootstrapper::spawn(bootstrapper);
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);
    // simulate client sending auth request with bootstrap token
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();
    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: Some("invalid_token".to_string()),
            },
        })
        .await;
    match result {
        Err(kameo::error::SendError::HandlerError(status)) => {
            assert_eq!(status.code(), tonic::Code::InvalidArgument);
            insta::assert_debug_snapshot!(status, @r#"
            Status {
                code: InvalidArgument,
                message: "Invalid bootstrap token",
                source: None,
            }
            "#);
        }
        Err(other) => {
            panic!("Expected SendError::HandlerError, got {other:?}");
        }
        Ok(_) => {
            panic!("Expected error due to invalid bootstrap token, but got success");
        }
    }
}
}
// Full challenge-response flow: with the client's pubkey pre-installed, the
// server issues a challenge; signing it with the matching key yields AuthOk.
#[tokio::test]
#[test_log::test]
pub async fn test_challenge_auth() {
    let db = db::create_test_pool().await;
    let bootstrapper_ref = Bootstrapper::spawn(Bootstrapper::new(&db).await.unwrap());
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);
    // simulate client sending auth request with bootstrap token
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();
    // insert pubkey into database to trigger challenge-response auth flow
    {
        let mut conn = db.get().await.unwrap();
        insert_into(schema::useragent_client::table)
            .values(schema::useragent_client::public_key.eq(pubkey_bytes.clone()))
            .execute(&mut conn)
            .await
            .unwrap();
    }
    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: None,
            },
        })
        .await
        .expect("Shouldn't fail to send message");
    // auth challenge succeeded: the server must answer with an AuthChallenge
    let UserAgentResponse {
        payload:
            Some(UserAgentResponsePayload::AuthMessage(arbiter_proto::proto::auth::ServerMessage {
                payload:
                    Some(arbiter_proto::proto::auth::server_message::Payload::AuthChallenge(challenge)),
            })),
    } = result
    else {
        panic!("Expected auth challenge response, got {result:?}");
    };
    // sign the canonical byte form of the challenge with the client key
    let formatted_challenge = arbiter_proto::format_challenge(&challenge);
    let signature = new_key.sign(&formatted_challenge);
    let serialized_signature = signature.to_bytes().to_vec();
    let result = user_agent_ref
        .ask(HandleAuthChallengeSolution {
            solution: auth::AuthChallengeSolution {
                signature: serialized_signature,
            },
        })
        .await
        .expect("Shouldn't fail to send message");
    // auth succeeded
    assert_eq!(
        result,
        UserAgentResponse {
            payload: Some(UserAgentResponsePayload::AuthMessage(
                arbiter_proto::proto::auth::ServerMessage {
                    payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
                        AuthOk {},
                    )),
                },
            )),
        }
    );
}

View File

@@ -2,9 +2,12 @@ use super::UserAgentActor;
use arbiter_proto::proto::{ use arbiter_proto::proto::{
UserAgentRequest, UserAgentResponse, UserAgentRequest, UserAgentResponse,
auth::{ auth::{
ClientMessage as ClientAuthMessage, client_message::Payload as ClientAuthPayload, self, AuthChallenge, AuthChallengeRequest, AuthOk, ClientMessage,
ServerMessage as AuthServerMessage, client_message::Payload as ClientAuthPayload,
server_message::Payload as ServerAuthPayload,
}, },
user_agent_request::Payload as UserAgentRequestPayload, user_agent_request::Payload as UserAgentRequestPayload,
user_agent_response::Payload as UserAgentResponsePayload,
}; };
use futures::StreamExt; use futures::StreamExt;
use kameo::{ use kameo::{
@@ -16,10 +19,7 @@ use tonic::Status;
use tracing::error; use tracing::error;
use crate::{ use crate::{
actors::user_agent::{ actors::user_agent::{HandleAuthChallengeRequest, HandleAuthChallengeSolution},
HandleAuthChallengeRequest, HandleAuthChallengeSolution, HandleUnsealEncryptedKey,
HandleUnsealRequest,
},
context::ServerContext, context::ServerContext,
}; };
@@ -59,30 +59,28 @@ async fn process_message(
Status::invalid_argument("Expected message with payload") Status::invalid_argument("Expected message with payload")
})?; })?;
match msg { let UserAgentRequestPayload::AuthMessage(ClientMessage {
UserAgentRequestPayload::AuthMessage(ClientAuthMessage { payload: Some(client_message),
payload: Some(ClientAuthPayload::AuthChallengeRequest(req)), }) = msg
}) => actor else {
error!(
actor = "useragent",
"Received unexpected message type during authentication"
);
return Err(Status::invalid_argument(
"Expected AuthMessage with ClientMessage payload",
));
};
match client_message {
ClientAuthPayload::AuthChallengeRequest(req) => actor
.ask(HandleAuthChallengeRequest { req }) .ask(HandleAuthChallengeRequest { req })
.await .await
.map_err(into_status), .map_err(into_status),
UserAgentRequestPayload::AuthMessage(ClientAuthMessage { ClientAuthPayload::AuthChallengeSolution(solution) => actor
payload: Some(ClientAuthPayload::AuthChallengeSolution(solution)),
}) => actor
.ask(HandleAuthChallengeSolution { solution }) .ask(HandleAuthChallengeSolution { solution })
.await .await
.map_err(into_status), .map_err(into_status),
UserAgentRequestPayload::UnsealStart(unseal_start) => actor
.ask(HandleUnsealRequest { req: unseal_start })
.await
.map_err(into_status),
UserAgentRequestPayload::UnsealEncryptedKey(unseal_encrypted_key) => actor
.ask(HandleUnsealEncryptedKey {
req: unseal_encrypted_key,
})
.await
.map_err(into_status),
_ => Err(Status::invalid_argument("Expected message with payload")),
} }
} }

View File

@@ -1,21 +1,36 @@
use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use diesel::OptionalExtension as _; use diesel::OptionalExtension as _;
use diesel_async::RunQueryDsl as _; use diesel_async::RunQueryDsl as _;
use ed25519_dalek::VerifyingKey;
use kameo::actor::{ActorRef, Spawn}; use kameo::actor::{ActorRef, Spawn};
use miette::Diagnostic; use miette::Diagnostic;
use rand::rngs::StdRng;
use secrecy::{ExposeSecret, SecretBox};
use smlang::statemachine;
use thiserror::Error; use thiserror::Error;
use tokio::sync::{watch, RwLock};
use zeroize::Zeroizing;
use crate::{ use crate::{
actors::{ context::{
bootstrap::{self, Bootstrapper}, bootstrap::{BootstrapActor, generate_token},
keyholder::KeyHolder, lease::LeaseHandler,
tls::{RotationState, RotationTask, TlsDataRaw, TlsManager},
},
db::{
self,
models::ArbiterSetting,
schema::{self, arbiter_settings},
}, },
context::tls::{TlsDataRaw, TlsManager},
db::{self, models::ArbiterSetting, schema::arbiter_settings},
}; };
pub mod tls; pub(crate) mod bootstrap;
pub(crate) mod lease;
pub(crate) mod tls;
pub(crate) mod unseal;
#[derive(Error, Debug, Diagnostic)] #[derive(Error, Debug, Diagnostic)]
pub enum InitError { pub enum InitError {
@@ -39,23 +54,112 @@ pub enum InitError {
#[diagnostic(code(arbiter_server::init::bootstrap_token))] #[diagnostic(code(arbiter_server::init::bootstrap_token))]
BootstrapToken(#[from] bootstrap::BootstrapError), BootstrapToken(#[from] bootstrap::BootstrapError),
#[error("KeyHolder initialization failed: {0}")]
#[diagnostic(code(arbiter_server::init::keyholder_init))]
KeyHolder(#[from] crate::actors::keyholder::Error),
#[error("I/O Error: {0}")] #[error("I/O Error: {0}")]
#[diagnostic(code(arbiter_server::init::io))] #[diagnostic(code(arbiter_server::init::io))]
Io(#[from] std::io::Error), Io(#[from] std::io::Error),
} }
pub struct _ServerContextInner { #[derive(Error, Debug, Diagnostic)]
/// Errors that can occur while unsealing the vault with a password.
pub enum UnsealError {
    /// Failed to check out a connection from the pool.
    #[error("Database error: {0}")]
    #[diagnostic(code(arbiter_server::unseal::database_pool))]
    Database(#[from] db::PoolError),

    /// A diesel query against the settings/key tables failed.
    #[error("Query error: {0}")]
    #[diagnostic(code(arbiter_server::unseal::database_query))]
    Query(#[from] diesel::result::Error),

    /// Wrong password or corrupted ciphertext.
    #[error("Decryption failed: {0}")]
    #[diagnostic(code(arbiter_server::unseal::decryption))]
    DecryptionFailed(#[from] crate::crypto::CryptoError),

    /// The state machine rejected the Unsealed transition.
    #[error("Invalid state for unseal")]
    #[diagnostic(code(arbiter_server::unseal::invalid_state))]
    InvalidState,

    /// The stored encrypted key has no Argon2 salt.
    #[error("Missing salt in database")]
    #[diagnostic(code(arbiter_server::unseal::missing_salt))]
    MissingSalt,

    /// Settings row exists but `root_key_id` is unset.
    #[error("No root key configured in database")]
    #[diagnostic(code(arbiter_server::unseal::no_root_key))]
    NoRootKey,
}
/// Errors that can occur while sealing (locking) the server.
#[derive(Error, Debug, Diagnostic)]
pub enum SealError {
    /// The state machine rejected the Sealed transition.
    #[error("Invalid state for seal")]
    #[diagnostic(code(arbiter_server::seal::invalid_state))]
    InvalidState,
}
/// Secure in-memory storage for the root encryption key.
///
/// Uses the `secrecy` crate for automatic zeroization on drop to prevent key
/// material from lingering in memory after use. `SecretBox` provides
/// heap-allocated secret storage that implements Send + Sync for safe use in
/// async contexts.
pub struct KeyStorage {
    /// 32-byte root key protected by SecretBox.
    key: SecretBox<[u8; 32]>,
}

impl KeyStorage {
    /// Create new KeyStorage from a 32-byte root key.
    pub fn new(key: [u8; 32]) -> Self {
        Self {
            key: SecretBox::new(Box::new(key)),
        }
    }

    /// Access the key for cryptographic operations.
    /// Callers must not copy the bytes out of the returned reference.
    pub fn key(&self) -> &[u8; 32] {
        self.key.expose_secret()
    }
}
// Drop is implemented automatically via secrecy's Zeroize support,
// which wipes the key material from memory when the storage is freed.
// Server lifecycle: NotBootstrapped -> Sealed -> Ready. The root key is
// moved into the Ready state on unseal and disposed of on seal.
statemachine! {
    name: Server,
    transitions: {
        *NotBootstrapped + Bootstrapped = Sealed,
        Sealed + Unsealed(KeyStorage) / move_key = Ready(KeyStorage),
        Ready(KeyStorage) + Sealed / dispose_key = Sealed,
    }
}
/// Callback context for the server state machine.
pub struct _Context;

impl ServerStateMachineContext for _Context {
    /// Move key from unseal event into Ready state
    fn move_key(&mut self, event_data: KeyStorage) -> Result<KeyStorage, ()> {
        // Simply move the KeyStorage from the event into the state:
        // no cloning — the event data is consumed.
        Ok(event_data)
    }

    /// Securely dispose of key when sealing
    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn dispose_key(&mut self, _state_data: &KeyStorage) -> Result<(), ()> {
        // KeyStorage is dropped after the state transition;
        // secrecy's zeroization wipes the memory automatically.
        Ok(())
    }
}
pub(crate) struct _ServerContextInner {
pub db: db::DatabasePool, pub db: db::DatabasePool,
pub tls: TlsManager, pub state: RwLock<ServerStateMachine<_Context>>,
pub bootstrapper: ActorRef<Bootstrapper>, pub rng: StdRng,
pub keyholder: ActorRef<KeyHolder>, pub tls: Arc<TlsManager>,
pub bootstrapper: ActorRef<BootstrapActor>,
pub rotation_state: RwLock<RotationState>,
pub rotation_acks: Arc<RwLock<HashSet<VerifyingKey>>>,
pub user_agent_leases: LeaseHandler<VerifyingKey>,
pub client_leases: LeaseHandler<VerifyingKey>,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct ServerContext(Arc<_ServerContextInner>); pub(crate) struct ServerContext(Arc<_ServerContextInner>);
impl std::ops::Deref for ServerContext { impl std::ops::Deref for ServerContext {
type Target = _ServerContextInner; type Target = _ServerContextInner;
@@ -66,55 +170,234 @@ impl std::ops::Deref for ServerContext {
} }
impl ServerContext { impl ServerContext {
/// Check if all active clients have acknowledged the rotation
pub async fn check_rotation_ready(&self) -> bool {
// TODO: Implement proper rotation readiness check
// For now, return false as placeholder
false
}
async fn load_tls( async fn load_tls(
db: &mut db::DatabaseConnection, db: &db::DatabasePool,
settings: Option<&ArbiterSetting>, settings: Option<&ArbiterSetting>,
) -> Result<TlsManager, InitError> { ) -> Result<TlsManager, InitError> {
match &settings { match settings {
Some(settings) => { Some(s) if s.current_cert_id.is_some() => {
// Load active certificate from tls_certificates table
Ok(TlsManager::load_from_db(
db.clone(),
s.current_cert_id.unwrap(),
)
.await?)
}
Some(s) => {
// Legacy migration: extract validity and save to new table
let tls_data_raw = TlsDataRaw { let tls_data_raw = TlsDataRaw {
cert: settings.cert.clone(), cert: s.cert.clone(),
key: settings.cert_key.clone(), key: s.cert_key.clone(),
}; };
Ok(TlsManager::new(Some(tls_data_raw)).await?) // For legacy certificates, use current time as not_before
// and current time + 90 days as not_after
let not_before = chrono::Utc::now().timestamp();
let not_after = not_before + (90 * 24 * 60 * 60); // 90 days
Ok(TlsManager::new_from_legacy(
db.clone(),
tls_data_raw,
not_before,
not_after,
)
.await?)
} }
None => { None => {
let tls = TlsManager::new(None).await?; // First startup - generate new certificate
let tls_data_raw = tls.bytes(); Ok(TlsManager::new(db.clone()).await?)
diesel::insert_into(arbiter_settings::table)
.values(&ArbiterSetting {
id: 1,
root_key_id: None,
cert_key: tls_data_raw.key,
cert: tls_data_raw.cert,
})
.execute(db)
.await?;
Ok(tls)
} }
} }
} }
pub async fn new(db: db::DatabasePool) -> Result<Self, InitError> { pub async fn new(db: db::DatabasePool) -> Result<Self, InitError> {
let mut conn = db.get().await?; let mut conn = db.get().await?;
let rng = rand::make_rng();
let settings = arbiter_settings::table let settings = arbiter_settings::table
.first::<ArbiterSetting>(&mut conn) .first::<ArbiterSetting>(&mut conn)
.await .await
.optional()?; .optional()?;
let tls = Self::load_tls(&mut conn, settings.as_ref()).await?; drop(conn);
// Load TLS manager
let tls = Self::load_tls(&db, settings.as_ref()).await?;
// Load rotation state from database
let rotation_state = RotationState::load_from_db(&db)
.await
.unwrap_or(RotationState::Normal);
let bootstrap_token = generate_token().await?;
let mut state = ServerStateMachine::new(_Context);
if let Some(settings) = &settings
&& settings.root_key_id.is_some()
{
// TODO: pass the encrypted root key to the state machine and let it handle decryption and transition to Sealed
let _ = state.process_event(ServerEvents::Bootstrapped);
}
// Create shutdown channel for rotation task
let (rotation_shutdown_tx, rotation_shutdown_rx) = watch::channel(false);
// Initialize bootstrap actor
let bootstrapper = BootstrapActor::spawn(BootstrapActor::new(&db).await?);
let context = Arc::new(_ServerContextInner {
db: db.clone(),
rng,
tls: Arc::new(tls),
state: RwLock::new(state),
bootstrapper,
rotation_state: RwLock::new(rotation_state),
rotation_acks: Arc::new(RwLock::new(HashSet::new())),
user_agent_leases: Default::default(),
client_leases: Default::default(),
});
Ok(Self(context))
}
/// Unseal vault with password
pub async fn unseal(&self, password: &str) -> Result<(), UnsealError> {
use crate::crypto::root_key;
use diesel::QueryDsl as _;
// 1. Get root_key_id from settings
let mut conn = self.db.get().await?;
let settings: db::models::ArbiterSetting = schema::arbiter_settings::table
.first(&mut conn)
.await?;
let root_key_id = settings.root_key_id.ok_or(UnsealError::NoRootKey)?;
// 2. Load encrypted root key
let encrypted: db::models::AeadEncrypted = schema::aead_encrypted::table
.find(root_key_id)
.first(&mut conn)
.await?;
let salt = encrypted
.argon2_salt
.as_ref()
.ok_or(UnsealError::MissingSalt)?;
drop(conn); drop(conn);
Ok(Self(Arc::new(_ServerContextInner { // 3. Decrypt root key using password
bootstrapper: Bootstrapper::spawn(Bootstrapper::new(&db).await?), let root_key = root_key::decrypt_root_key(&encrypted, password, salt)
keyholder: KeyHolder::spawn(KeyHolder::new(db.clone()).await?), .map_err(UnsealError::DecryptionFailed)?;
db,
tls, // 4. Create secure storage
}))) let key_storage = KeyStorage::new(root_key);
// 5. Transition state machine
let mut state = self.state.write().await;
state
.process_event(ServerEvents::Unsealed(key_storage))
.map_err(|_| UnsealError::InvalidState)?;
Ok(())
}
/// Seal the server (lock the key)
pub async fn seal(&self) -> Result<(), SealError> {
let mut state = self.state.write().await;
state
.process_event(ServerEvents::Sealed)
.map_err(|_| SealError::InvalidState)?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_keystorage_creation() {
    // A fresh KeyStorage must expose exactly the 32 bytes it was given.
    let key = [42u8; 32];
    let storage = KeyStorage::new(key);
    assert_eq!(storage.key()[0], 42);
    assert_eq!(storage.key().len(), 32);
}
#[test]
fn test_keystorage_zeroization() {
    let key = [99u8; 32];
    {
        let _storage = KeyStorage::new(key);
        // storage is dropped here
    }
    // After drop, SecretBox must zero out the memory.
    // This is guaranteed automatically by secrecy's Zeroize implementation.
}
// Walks the full legal lifecycle: NotBootstrapped -> Sealed -> Ready -> Sealed.
#[test]
fn test_state_machine_transitions() {
    let mut state = ServerStateMachine::new(_Context);
    // Initial state
    assert!(matches!(state.state(), &ServerStates::NotBootstrapped));
    // Bootstrapped transition
    state.process_event(ServerEvents::Bootstrapped).unwrap();
    assert!(matches!(state.state(), &ServerStates::Sealed));
    // Unsealed transition
    let key_storage = KeyStorage::new([1u8; 32]);
    state
        .process_event(ServerEvents::Unsealed(key_storage))
        .unwrap();
    assert!(matches!(state.state(), &ServerStates::Ready(_)));
    // Sealed transition
    state.process_event(ServerEvents::Sealed).unwrap();
    assert!(matches!(state.state(), &ServerStates::Sealed));
}
// move_key must pass the KeyStorage through unchanged.
#[test]
fn test_move_key_callback() {
    let mut ctx = _Context;
    let key_storage = KeyStorage::new([7u8; 32]);
    let result = ctx.move_key(key_storage);
    assert!(result.is_ok());
    assert_eq!(result.unwrap().key()[0], 7);
}
// dispose_key must accept any key state and succeed.
#[test]
fn test_dispose_key_callback() {
    let mut ctx = _Context;
    let key_storage = KeyStorage::new([13u8; 32]);
    let result = ctx.dispose_key(&key_storage);
    assert!(result.is_ok());
}
// Illegal transitions must be rejected by the state machine.
#[test]
fn test_invalid_state_transitions() {
    let mut state = ServerStateMachine::new(_Context);
    // Attempting to unseal without bootstrapping must fail
    let key_storage = KeyStorage::new([1u8; 32]);
    let result = state.process_event(ServerEvents::Unsealed(key_storage));
    assert!(result.is_err());
    // The legal path
    state.process_event(ServerEvents::Bootstrapped).unwrap();
    // A second bootstrap attempt must fail
    let result = state.process_event(ServerEvents::Bootstrapped);
    assert!(result.is_err());
}
} }
} }

View File

@@ -1,13 +1,19 @@
use arbiter_proto::{BOOTSTRAP_TOKEN_PATH, home_path}; use arbiter_proto::{BOOTSTRAP_TOKEN_PATH, home_path};
use diesel::QueryDsl; use diesel::{ExpressionMethods, QueryDsl};
use diesel_async::RunQueryDsl; use diesel_async::RunQueryDsl;
use kameo::{Actor, messages}; use kameo::{Actor, messages};
use memsafe::MemSafe;
use miette::Diagnostic; use miette::Diagnostic;
use rand::{RngExt, distr::StandardUniform, make_rng, rngs::StdRng}; use rand::{RngExt, distr::StandardUniform, make_rng, rngs::StdRng};
use secrecy::SecretString;
use thiserror::Error; use thiserror::Error;
use tracing::info; use tracing::info;
use zeroize::{Zeroize, Zeroizing};
use crate::db::{self, DatabasePool, schema}; use crate::{
context::{self, ServerContext},
db::{self, DatabasePool, schema},
};
const TOKEN_LENGTH: usize = 64; const TOKEN_LENGTH: usize = 64;
@@ -43,11 +49,11 @@ pub enum BootstrapError {
} }
#[derive(Actor)] #[derive(Actor)]
pub struct Bootstrapper { pub struct BootstrapActor {
token: Option<String>, token: Option<String>,
} }
impl Bootstrapper { impl BootstrapActor {
pub async fn new(db: &DatabasePool) -> Result<Self, BootstrapError> { pub async fn new(db: &DatabasePool) -> Result<Self, BootstrapError> {
let mut conn = db.get().await?; let mut conn = db.get().await?;
@@ -77,7 +83,7 @@ impl Bootstrapper {
} }
#[messages] #[messages]
impl Bootstrapper { impl BootstrapActor {
#[message] #[message]
pub fn is_correct_token(&self, token: String) -> bool { pub fn is_correct_token(&self, token: String) -> bool {
match &self.token { match &self.token {

View File

@@ -0,0 +1,46 @@
use std::sync::Arc;
use dashmap::DashSet;
/// Shared set of currently-leased items; cloned handles point at the same set.
#[derive(Clone, Default)]
struct LeaseStorage<T: Eq + std::hash::Hash>(Arc<DashSet<T>>);
/// A lease that automatically releases the item when dropped.
pub struct Lease<T: Clone + std::hash::Hash + Eq> {
    /// The leased item; removed from `storage` on drop.
    item: T,
    /// Handle to the shared set the item was inserted into.
    storage: LeaseStorage<T>,
}
impl<T: Clone + std::hash::Hash + Eq> Drop for Lease<T> {
    fn drop(&mut self) {
        // Release the lease so the item becomes acquirable again.
        self.storage.0.remove(&self.item);
    }
}
/// Hands out exclusive `Lease`s over items; at most one lease per item at a time.
#[derive(Clone, Default)]
pub struct LeaseHandler<T: Clone + std::hash::Hash + Eq> {
    // Shared with every outstanding Lease so drops release their items.
    storage: LeaseStorage<T>,
}
impl<T: Clone + std::hash::Hash + Eq> LeaseHandler<T> {
    /// Creates a handler with an empty lease set.
    pub fn new() -> Self {
        let storage = LeaseStorage(Arc::new(DashSet::new()));
        Self { storage }
    }

    /// Attempts to lease `item`. Returns `Err(())` when the item is already
    /// leased; otherwise the returned `Lease` releases it on drop.
    pub fn acquire(&self, item: T) -> Result<Lease<T>, ()> {
        // DashSet::insert is false when the value was already present.
        if !self.storage.0.insert(item.clone()) {
            return Err(());
        }
        Ok(Lease {
            item,
            storage: self.storage.clone(),
        })
    }

    /// Get all currently leased items.
    pub fn get_all(&self) -> Vec<T> {
        let mut leased = Vec::new();
        for entry in self.storage.0.iter() {
            leased.push(entry.clone());
        }
        leased
    }
}

View File

@@ -1,89 +0,0 @@
use std::string::FromUtf8Error;
use miette::Diagnostic;
use rcgen::{Certificate, KeyPair};
use rustls::pki_types::CertificateDer;
use thiserror::Error;
/// Failure modes while creating or restoring TLS material.
#[derive(Error, Debug, Diagnostic)]
pub enum TlsInitError {
    /// rcgen failed to generate a key or certificate.
    #[error("Key generation error during TLS initialization: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_generation))]
    KeyGeneration(#[from] rcgen::Error),

    /// Stored key bytes are not valid UTF-8 (PEM is text).
    #[error("Key invalid format: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_invalid_format))]
    KeyInvalidFormat(#[from] FromUtf8Error),

    /// PEM text did not parse into a keypair.
    #[error("Key deserialization error: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_deserialization))]
    KeyDeserializationError(rcgen::Error),
}
/// Parsed, ready-to-use TLS material.
pub struct TlsData {
    /// DER-encoded certificate.
    pub cert: CertificateDer<'static>,
    /// Private keypair matching the certificate.
    pub keypair: KeyPair,
}
/// TLS material as raw bytes, suitable for database storage.
pub struct TlsDataRaw {
    /// DER-encoded certificate bytes.
    pub cert: Vec<u8>,
    /// PEM-encoded private key bytes.
    pub key: Vec<u8>,
}
impl TlsDataRaw {
    /// Serializes `cert` into raw DER (certificate) and PEM (key) buffers.
    pub fn serialize(cert: &TlsData) -> Self {
        let cert_der = cert.cert.as_ref().to_vec();
        let key_pem = cert.keypair.serialize_pem().into_bytes();
        Self {
            cert: cert_der,
            key: key_pem,
        }
    }

    /// Parses the raw buffers back into a usable certificate/keypair pair.
    pub fn deserialize(&self) -> Result<TlsData, TlsInitError> {
        let cert = CertificateDer::from_slice(&self.cert).into_owned();
        let pem = String::from_utf8(self.key.clone()).map_err(TlsInitError::KeyInvalidFormat)?;
        let keypair = KeyPair::from_pem(&pem).map_err(TlsInitError::KeyDeserializationError)?;
        Ok(TlsData { cert, keypair })
    }
}
// Build a self-signed certificate for the fixed local SANs.
fn generate_cert(key: &KeyPair) -> Result<Certificate, rcgen::Error> {
    let params = rcgen::CertificateParams::new(vec![
        "arbiter.local".to_string(),
        "localhost".to_string(),
    ])?;
    params.self_signed(key)
}
// TODO: Implement cert rotation
// Owns the server's TLS material; created once at startup.
pub struct TlsManager {
    data: TlsData,
}
impl TlsManager {
    /// Build a manager from persisted TLS bytes, or generate fresh
    /// self-signed material when none is supplied.
    pub async fn new(data: Option<TlsDataRaw>) -> Result<Self, TlsInitError> {
        let tls_data = if let Some(raw) = data {
            raw.deserialize()?
        } else {
            let keypair = KeyPair::generate()?;
            let cert = generate_cert(&keypair)?;
            TlsData {
                cert: cert.der().clone(),
                keypair,
            }
        };
        Ok(Self { data: tls_data })
    }

    /// Export the current TLS material as raw bytes for persistence.
    pub fn bytes(&self) -> TlsDataRaw {
        TlsDataRaw::serialize(&self.data)
    }
}

View File

@@ -0,0 +1,192 @@
use std::sync::Arc;
use std::string::FromUtf8Error;
use miette::Diagnostic;
use rcgen::{Certificate, KeyPair};
use rustls::pki_types::CertificateDer;
use thiserror::Error;
use tokio::sync::RwLock;
use crate::db;
pub mod rotation;
pub use rotation::{RotationError, RotationState, RotationTask};
// Errors raised while initializing TLS material.
#[derive(Error, Debug, Diagnostic)]
#[expect(clippy::enum_variant_names)]
pub enum TlsInitError {
    // Failure while generating a fresh keypair or certificate.
    #[error("Key generation error during TLS initialization: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_generation))]
    KeyGeneration(#[from] rcgen::Error),
    // Stored key bytes were not valid UTF-8 PEM text.
    #[error("Key invalid format: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_invalid_format))]
    KeyInvalidFormat(#[from] FromUtf8Error),
    // PEM text was valid UTF-8 but could not be parsed into a key pair.
    #[error("Key deserialization error: {0}")]
    #[diagnostic(code(arbiter_server::tls_init::key_deserialization))]
    KeyDeserializationError(rcgen::Error),
}
// In-memory TLS material: DER-encoded certificate plus its key pair.
pub struct TlsData {
    pub cert: CertificateDer<'static>,
    pub keypair: KeyPair,
}
// Raw byte form of `TlsData` suitable for persistence:
// `cert` is DER bytes, `key` is PEM text as UTF-8 bytes.
pub struct TlsDataRaw {
    pub cert: Vec<u8>,
    pub key: Vec<u8>,
}
impl TlsDataRaw {
    /// Serialize in-memory TLS material: DER bytes for the cert, PEM bytes for the key.
    pub fn serialize(cert: &TlsData) -> Self {
        Self {
            cert: cert.cert.as_ref().to_vec(),
            key: cert.keypair.serialize_pem().as_bytes().to_vec(),
        }
    }
    /// Parse the raw buffers back into usable TLS material.
    pub fn deserialize(&self) -> Result<TlsData, TlsInitError> {
        let cert = CertificateDer::from_slice(&self.cert).into_owned();
        // Key bytes are UTF-8 PEM text; the two failure modes map to
        // distinct `TlsInitError` variants.
        let key =
            String::from_utf8(self.key.clone()).map_err(TlsInitError::KeyInvalidFormat)?;
        let keypair = KeyPair::from_pem(&key).map_err(TlsInitError::KeyDeserializationError)?;
        Ok(TlsData { cert, keypair })
    }
}
/// Metadata about a certificate including validity period
pub struct CertificateMetadata {
    // Database row id of the certificate (0 until persisted).
    pub cert_id: i32,
    // DER-encoded certificate.
    pub cert: CertificateDer<'static>,
    // Shared key pair; Arc so it can be handed out cheaply.
    pub keypair: Arc<KeyPair>,
    // Validity start, Unix-epoch seconds (tracked separately from the cert).
    pub not_before: i64,
    // Validity end, Unix-epoch seconds.
    pub not_after: i64,
    // Creation time, Unix-epoch seconds.
    pub created_at: i64,
}
// Build a self-signed certificate for the fixed local SANs and return it
// together with the intended validity window (Unix-epoch seconds).
// NOTE: the validity window is tracked externally and is NOT embedded in the
// certificate itself (see comment below) — verify this is acceptable before
// relying on on-wire expiry.
pub(crate) fn generate_cert(key: &KeyPair) -> Result<(Certificate, i64, i64), rcgen::Error> {
    let params = rcgen::CertificateParams::new(vec![
        "arbiter.local".to_string(),
        "localhost".to_string(),
    ])?;
    // Set validity period: 90 days from now
    let not_before = chrono::Utc::now();
    let not_after = not_before + chrono::Duration::days(90);
    // Note: rcgen doesn't directly expose not_before/not_after setting in all versions
    // For now, we'll generate the cert and track validity separately
    let cert = params.self_signed(key)?;
    Ok((cert, not_before.timestamp(), not_after.timestamp()))
}
// Certificate rotation enabled
// Owns the active TLS certificate; supports atomic replacement for rotation.
pub(crate) struct TlsManager {
    // Current active certificate (atomic replacement via RwLock)
    current_cert: Arc<RwLock<CertificateMetadata>>,
    // Database pool for persistence
    db: db::DatabasePool,
}
impl TlsManager {
    /// Create new TlsManager with a freshly generated self-signed certificate.
    ///
    /// The certificate is not yet persisted; `cert_id` is a placeholder (0)
    /// until the row is inserted into the database.
    pub async fn new(db: db::DatabasePool) -> Result<Self, TlsInitError> {
        let keypair = KeyPair::generate()?;
        let (cert, not_before, not_after) = generate_cert(&keypair)?;
        let cert_der = cert.der().clone();
        // For initial creation, cert_id will be set after DB insert.
        let metadata = CertificateMetadata {
            cert_id: 0, // Temporary, will be updated after DB insert
            cert: cert_der,
            keypair: Arc::new(keypair),
            not_before,
            not_after,
            created_at: chrono::Utc::now().timestamp(),
        };
        Ok(Self {
            current_cert: Arc::new(RwLock::new(metadata)),
            db,
        })
    }

    /// Load TlsManager from database with a specific certificate ID.
    ///
    /// Not implemented yet: always returns an error. Parameters are
    /// underscore-prefixed so this stub compiles without unused-variable
    /// warnings until the DB lookup lands.
    pub async fn load_from_db(_db: db::DatabasePool, _cert_id: i32) -> Result<Self, TlsInitError> {
        // TODO: Load certificate from database once database access is ready.
        Err(TlsInitError::KeyGeneration(rcgen::Error::CouldNotParseCertificate))
    }

    /// Create from the legacy TlsDataRaw format.
    ///
    /// The caller supplies the validity window (Unix-epoch seconds) since the
    /// legacy format did not store it.
    pub async fn new_from_legacy(
        db: db::DatabasePool,
        data: TlsDataRaw,
        not_before: i64,
        not_after: i64,
    ) -> Result<Self, TlsInitError> {
        let tls_data = data.deserialize()?;
        let metadata = CertificateMetadata {
            cert_id: 1, // Legacy certificate gets ID 1
            cert: tls_data.cert,
            keypair: Arc::new(tls_data.keypair),
            not_before,
            not_after,
            created_at: chrono::Utc::now().timestamp(),
        };
        Ok(Self {
            current_cert: Arc::new(RwLock::new(metadata)),
            db,
        })
    }

    /// Get the current certificate and key (the key pair is Arc-shared).
    pub async fn get_certificate(&self) -> (CertificateDer<'static>, Arc<KeyPair>) {
        let cert = self.current_cert.read().await;
        (cert.cert.clone(), cert.keypair.clone())
    }

    /// Replace the active certificate atomically (single write-lock swap).
    pub async fn replace_certificate(&self, new_cert: CertificateMetadata) -> Result<(), TlsInitError> {
        let mut cert = self.current_cert.write().await;
        *cert = new_cert;
        Ok(())
    }

    /// Whether the certificate expires within `threshold_secs` seconds from now.
    pub async fn check_expiration(&self, threshold_secs: i64) -> bool {
        let cert = self.current_cert.read().await;
        let now = chrono::Utc::now().timestamp();
        cert.not_after - now < threshold_secs
    }

    /// Snapshot of the current certificate metadata for rotation logic.
    pub async fn get_certificate_metadata(&self) -> CertificateMetadata {
        let cert = self.current_cert.read().await;
        CertificateMetadata {
            cert_id: cert.cert_id,
            cert: cert.cert.clone(),
            keypair: cert.keypair.clone(),
            not_before: cert.not_before,
            not_after: cert.not_after,
            created_at: cert.created_at,
        }
    }

    /// Export TLS material as raw bytes.
    ///
    /// NOTE(review): this is a stub that returns EMPTY buffers — persisting
    /// its result would store no usable TLS data. TODO: make this async (so
    /// it can read the RwLock) or remove it if no longer needed.
    pub fn bytes(&self) -> TlsDataRaw {
        TlsDataRaw {
            cert: vec![],
            key: vec![],
        }
    }
}

View File

@@ -0,0 +1,552 @@
use std::collections::HashSet;
use std::sync::Arc;
use std::time::Duration;
use diesel::prelude::*;
use diesel_async::RunQueryDsl;
use ed25519_dalek::VerifyingKey;
use miette::Diagnostic;
use rcgen::KeyPair;
use thiserror::Error;
use tokio::sync::watch;
use tracing::{debug, error, info, warn};
use crate::context::ServerContext;
use crate::db::models::{NewRotationClientAck, NewTlsCertificate, NewTlsRotationHistory};
use crate::db::schema::{rotation_client_acks, tls_certificates, tls_rotation_history, tls_rotation_state};
use crate::db::DatabasePool;
use super::{generate_cert, CertificateMetadata, TlsInitError};
// Errors raised by the certificate-rotation state machine.
#[derive(Error, Debug, Diagnostic)]
pub enum RotationError {
    // rcgen failed to generate the replacement certificate.
    #[error("Certificate generation failed: {0}")]
    #[diagnostic(code(arbiter_server::rotation::cert_generation))]
    CertGeneration(#[from] rcgen::Error),
    // A Diesel query failed.
    #[error("Database error: {0}")]
    #[diagnostic(code(arbiter_server::rotation::database))]
    Database(#[from] diesel::result::Error),
    // Loading/parsing TLS material failed.
    #[error("TLS initialization error: {0}")]
    #[diagnostic(code(arbiter_server::rotation::tls_init))]
    TlsInit(#[from] TlsInitError),
    // The persisted rotation state was inconsistent (also used for pool errors).
    #[error("Invalid rotation state: {0}")]
    #[diagnostic(code(arbiter_server::rotation::invalid_state))]
    InvalidState(String),
    // No certificate row is marked active.
    #[error("No active certificate found")]
    #[diagnostic(code(arbiter_server::rotation::no_active_cert))]
    NoActiveCertificate,
}
/// State of the certificate rotation process.
#[derive(Debug, Clone)]
pub enum RotationState {
    /// Normal operation; no rotation in progress.
    Normal,
    /// Rotation has been initiated and a new certificate generated.
    RotationInitiated {
        // Unix-epoch seconds when the rotation started.
        initiated_at: i64,
        // DB id of the newly generated (not yet active) certificate.
        new_cert_id: i32,
    },
    /// Waiting for acknowledgements (ACKs) from clients.
    WaitingForAcks {
        new_cert_id: i32,
        initiated_at: i64,
        // Deadline (Unix-epoch seconds) after which rotation proceeds anyway.
        timeout_at: i64,
    },
    /// All ACKs received or the timeout expired; ready to rotate.
    ReadyToRotate {
        new_cert_id: i32,
    },
}
impl RotationState {
    /// Load the rotation state from the database (singleton row with id = 1).
    pub async fn load_from_db(db: &DatabasePool) -> Result<Self, RotationError> {
        use crate::db::schema::tls_rotation_state::dsl::*;
        let mut conn = db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        // Row layout: (id, state, new_cert_id, initiated_at, timeout_at).
        let state_record: (i32, String, Option<i32>, Option<i32>, Option<i32>) =
            tls_rotation_state
                .select((id, state, new_cert_id, initiated_at, timeout_at))
                .filter(id.eq(1))
                .first(&mut conn)
                .await?;
        // Decode the textual tag; each variant requires its own optional columns
        // to be present, otherwise the stored state is inconsistent.
        let rotation_state = match state_record.1.as_str() {
            "normal" => RotationState::Normal,
            "initiated" => {
                let cert_id = state_record.2.ok_or_else(|| {
                    RotationError::InvalidState("Initiated state missing new_cert_id".into())
                })?;
                let init_at = state_record.3.ok_or_else(|| {
                    RotationError::InvalidState("Initiated state missing initiated_at".into())
                })?;
                RotationState::RotationInitiated {
                    initiated_at: init_at as i64,
                    new_cert_id: cert_id,
                }
            }
            "waiting_acks" => {
                let cert_id = state_record.2.ok_or_else(|| {
                    RotationError::InvalidState("WaitingForAcks state missing new_cert_id".into())
                })?;
                let init_at = state_record.3.ok_or_else(|| {
                    RotationError::InvalidState("WaitingForAcks state missing initiated_at".into())
                })?;
                let timeout = state_record.4.ok_or_else(|| {
                    RotationError::InvalidState("WaitingForAcks state missing timeout_at".into())
                })?;
                RotationState::WaitingForAcks {
                    new_cert_id: cert_id,
                    initiated_at: init_at as i64,
                    timeout_at: timeout as i64,
                }
            }
            "ready" => {
                let cert_id = state_record.2.ok_or_else(|| {
                    RotationError::InvalidState("Ready state missing new_cert_id".into())
                })?;
                RotationState::ReadyToRotate {
                    new_cert_id: cert_id,
                }
            }
            other => {
                return Err(RotationError::InvalidState(format!(
                    "Unknown state: {}",
                    other
                )))
            }
        };
        Ok(rotation_state)
    }
    /// Persist this rotation state to the database (singleton row with id = 1).
    pub async fn save_to_db(&self, db: &DatabasePool) -> Result<(), RotationError> {
        use crate::db::schema::tls_rotation_state::dsl::*;
        let mut conn = db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        // Flatten the variant into (tag, new_cert_id, initiated_at, timeout_at);
        // timestamps are narrowed to i32 to match the schema's column types.
        let (state_str, cert_id, init_at, timeout) = match self {
            RotationState::Normal => ("normal", None, None, None),
            RotationState::RotationInitiated {
                initiated_at: init,
                new_cert_id: cert,
            } => ("initiated", Some(*cert), Some(*init as i32), None),
            RotationState::WaitingForAcks {
                new_cert_id: cert,
                initiated_at: init,
                timeout_at: timeout_val,
            } => (
                "waiting_acks",
                Some(*cert),
                Some(*init as i32),
                Some(*timeout_val as i32),
            ),
            RotationState::ReadyToRotate { new_cert_id: cert } => ("ready", Some(*cert), None, None),
        };
        diesel::update(tls_rotation_state.filter(id.eq(1)))
            .set((
                state.eq(state_str),
                new_cert_id.eq(cert_id),
                initiated_at.eq(init_at),
                timeout_at.eq(timeout),
            ))
            .execute(&mut conn)
            .await?;
        Ok(())
    }
}
/// Background task for automatic certificate rotation.
pub struct RotationTask {
    // Shared server context (TLS manager, DB pool, rotation state, ACKs).
    context: Arc<crate::context::_ServerContextInner>,
    // How often to poll the rotation state machine.
    check_interval: Duration,
    // Remaining-validity threshold below which rotation is initiated.
    rotation_threshold: Duration,
    // How long to wait for client ACKs before rotating anyway.
    ack_timeout: Duration,
    // Signals shutdown when the watched value changes.
    shutdown_rx: watch::Receiver<bool>,
}
impl RotationTask {
    /// Create a new rotation task.
    pub fn new(
        context: Arc<crate::context::_ServerContextInner>,
        check_interval: Duration,
        rotation_threshold: Duration,
        ack_timeout: Duration,
        shutdown_rx: watch::Receiver<bool>,
    ) -> Self {
        Self {
            context,
            check_interval,
            rotation_threshold,
            ack_timeout,
            shutdown_rx,
        }
    }

    /// Run the background monitoring/rotation loop until shutdown is signalled.
    pub async fn run(mut self) -> Result<(), RotationError> {
        info!("Starting TLS certificate rotation task");
        loop {
            tokio::select! {
                _ = tokio::time::sleep(self.check_interval) => {
                    // Errors are logged but never stop the loop.
                    if let Err(e) = self.check_and_process().await {
                        error!("Rotation task error: {}", e);
                    }
                }
                _ = self.shutdown_rx.changed() => {
                    info!("Rotation task shutting down");
                    break;
                }
            }
        }
        Ok(())
    }

    /// Inspect the current state and advance the rotation state machine one step.
    async fn check_and_process(&self) -> Result<(), RotationError> {
        let state = self.context.rotation_state.read().await.clone();
        match state {
            RotationState::Normal => {
                // Check whether a rotation needs to be started.
                self.check_expiration_and_initiate().await?;
            }
            RotationState::RotationInitiated { new_cert_id, .. } => {
                // Automatically advance to WaitingForAcks.
                self.transition_to_waiting_acks(new_cert_id).await?;
            }
            RotationState::WaitingForAcks {
                new_cert_id,
                timeout_at,
                ..
            } => {
                self.handle_waiting_for_acks(new_cert_id, timeout_at).await?;
            }
            RotationState::ReadyToRotate { new_cert_id } => {
                self.execute_rotation(new_cert_id).await?;
            }
        }
        Ok(())
    }

    /// Check certificate validity and initiate rotation if it expires soon.
    async fn check_expiration_and_initiate(&self) -> Result<(), RotationError> {
        let threshold_secs = self.rotation_threshold.as_secs() as i64;
        if self.context.tls.check_expiration(threshold_secs).await {
            info!("Certificate expiring soon, initiating rotation");
            self.initiate_rotation().await?;
        }
        Ok(())
    }

    /// Initiate rotation: generate a new certificate and persist it.
    ///
    /// Returns the DB id of the newly stored (not yet active) certificate.
    pub async fn initiate_rotation(&self) -> Result<i32, RotationError> {
        info!("Initiating certificate rotation");
        // 1. Generate the new certificate.
        let keypair = KeyPair::generate()?;
        let (cert, not_before, not_after) = generate_cert(&keypair)?;
        let cert_der = cert.der().clone();
        // 2. Persist it (is_active = false until activation).
        let new_cert_id = self
            .save_new_certificate(&cert_der, &keypair, not_before, not_after)
            .await?;
        info!(new_cert_id, "New certificate generated and saved");
        // 3. Update rotation_state in memory and in the database.
        let new_state = RotationState::RotationInitiated {
            initiated_at: chrono::Utc::now().timestamp(),
            new_cert_id,
        };
        *self.context.rotation_state.write().await = new_state.clone();
        new_state.save_to_db(&self.context.db).await?;
        // 4. Record the event in the audit trail.
        self.log_rotation_event(new_cert_id, "rotation_initiated", None)
            .await?;
        Ok(new_cert_id)
    }

    /// Transition to WaitingForAcks and (eventually) notify clients.
    async fn transition_to_waiting_acks(&self, new_cert_id: i32) -> Result<(), RotationError> {
        info!(new_cert_id, "Transitioning to WaitingForAcks state");
        let initiated_at = chrono::Utc::now().timestamp();
        let timeout_at = initiated_at + self.ack_timeout.as_secs() as i64;
        // Update the state in memory and in the database.
        let new_state = RotationState::WaitingForAcks {
            new_cert_id,
            initiated_at,
            timeout_at,
        };
        *self.context.rotation_state.write().await = new_state.clone();
        new_state.save_to_db(&self.context.db).await?;
        // TODO: Broadcast notifications to clients.
        // self.broadcast_rotation_notification(new_cert_id, timeout_at).await?;
        info!(timeout_at, "Rotation notifications sent, waiting for ACKs");
        Ok(())
    }

    /// Handle the WaitingForAcks state: check received ACKs and the timeout.
    async fn handle_waiting_for_acks(
        &self,
        new_cert_id: i32,
        timeout_at: i64,
    ) -> Result<(), RotationError> {
        let now = chrono::Utc::now().timestamp();
        // Timeout expired: proceed even with missing ACKs.
        if now > timeout_at {
            let missing = self.get_missing_acks(new_cert_id).await?;
            warn!(
                missing_count = missing.len(),
                "Rotation ACK timeout reached, proceeding with rotation"
            );
            // Transition to ReadyToRotate.
            let new_state = RotationState::ReadyToRotate { new_cert_id };
            *self.context.rotation_state.write().await = new_state.clone();
            new_state.save_to_db(&self.context.db).await?;
            self.log_rotation_event(
                new_cert_id,
                "timeout",
                Some(format!("Missing ACKs from {} clients", missing.len())),
            )
            .await?;
            return Ok(());
        }
        // Otherwise check whether every client has acknowledged.
        let missing = self.get_missing_acks(new_cert_id).await?;
        if missing.is_empty() {
            info!("All clients acknowledged, ready to rotate");
            let new_state = RotationState::ReadyToRotate { new_cert_id };
            *self.context.rotation_state.write().await = new_state.clone();
            new_state.save_to_db(&self.context.db).await?;
            self.log_rotation_event(new_cert_id, "acks_complete", None)
                .await?;
        } else {
            let time_remaining = timeout_at - now;
            debug!(
                missing_count = missing.len(),
                time_remaining,
                "Waiting for rotation ACKs"
            );
        }
        Ok(())
    }

    /// Execute the atomic certificate rotation.
    async fn execute_rotation(&self, new_cert_id: i32) -> Result<(), RotationError> {
        info!(new_cert_id, "Executing certificate rotation");
        // 1. Load the new certificate from the database.
        let new_cert = self.load_certificate(new_cert_id).await?;
        // 2. Atomically replace it in the TlsManager.
        self.context
            .tls
            .replace_certificate(new_cert)
            .await
            .map_err(RotationError::TlsInit)?;
        // 3. Update the DB: old cert is_active=false, new cert is_active=true.
        self.activate_certificate(new_cert_id).await?;
        // 4. TODO: Disconnect all clients.
        // self.disconnect_all_clients().await?;
        // 5. Reset rotation_state to Normal.
        let new_state = RotationState::Normal;
        *self.context.rotation_state.write().await = new_state.clone();
        new_state.save_to_db(&self.context.db).await?;
        // 6. Clear ACKs, both in memory and in the database.
        self.context.rotation_acks.write().await.clear();
        self.clear_rotation_acks(new_cert_id).await?;
        // 7. Audit trail.
        self.log_rotation_event(new_cert_id, "activated", None)
            .await?;
        info!(new_cert_id, "Certificate rotation completed successfully");
        Ok(())
    }

    /// Store a freshly generated (inactive) certificate and return its row id.
    async fn save_new_certificate(
        &self,
        cert_der: &[u8],
        keypair: &KeyPair,
        cert_not_before: i64,
        cert_not_after: i64,
    ) -> Result<i32, RotationError> {
        use crate::db::schema::tls_certificates::dsl::*;
        let mut conn = self.context.db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        let new_cert = NewTlsCertificate {
            cert: cert_der.to_vec(),
            cert_key: keypair.serialize_pem().as_bytes().to_vec(),
            not_before: cert_not_before as i32,
            not_after: cert_not_after as i32,
            is_active: false,
        };
        diesel::insert_into(tls_certificates)
            .values(&new_cert)
            .execute(&mut conn)
            .await?;
        // Fetch the id of the row just inserted (SQLite-specific).
        let cert_id: i32 = diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
            "last_insert_rowid()",
        ))
        .first(&mut conn)
        .await?;
        self.log_rotation_event(cert_id, "created", None).await?;
        Ok(cert_id)
    }

    /// Load a certificate row from the database by id.
    async fn load_certificate(&self, cert_id: i32) -> Result<CertificateMetadata, RotationError> {
        use crate::db::schema::tls_certificates::dsl::*;
        let mut conn = self.context.db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        // Row layout: (cert DER, key PEM bytes, not_before, not_after, created_at).
        let cert_record: (Vec<u8>, Vec<u8>, i32, i32, i32) = tls_certificates
            .select((cert, cert_key, not_before, not_after, created_at))
            .filter(id.eq(cert_id))
            .first(&mut conn)
            .await?;
        let cert_der = rustls::pki_types::CertificateDer::from(cert_record.0);
        let key_pem = String::from_utf8(cert_record.1)
            .map_err(|e| RotationError::InvalidState(format!("Invalid key encoding: {}", e)))?;
        let keypair = KeyPair::from_pem(&key_pem)?;
        Ok(CertificateMetadata {
            cert_id,
            cert: cert_der,
            keypair: Arc::new(keypair),
            not_before: cert_record.2 as i64,
            not_after: cert_record.3 as i64,
            created_at: cert_record.4 as i64,
        })
    }

    /// Activate a certificate (set is_active = true, deactivating all others).
    async fn activate_certificate(&self, cert_id: i32) -> Result<(), RotationError> {
        use crate::db::schema::tls_certificates::dsl::*;
        let mut conn = self.context.db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        // Deactivate every certificate first…
        diesel::update(tls_certificates)
            .set(is_active.eq(false))
            .execute(&mut conn)
            .await?;
        // …then activate the new one.
        diesel::update(tls_certificates.filter(id.eq(cert_id)))
            .set(is_active.eq(true))
            .execute(&mut conn)
            .await?;
        Ok(())
    }

    /// List clients that have not yet acknowledged the rotation.
    ///
    /// Stub: the parameter is underscore-prefixed until implemented.
    async fn get_missing_acks(&self, _rotation_id: i32) -> Result<Vec<VerifyingKey>, RotationError> {
        // TODO: fetch the set of all active clients and subtract those that
        // already sent an ACK. For now return an empty list, which makes every
        // client count as acknowledged.
        Ok(Vec::new())
    }

    /// Delete the ACK rows for the given rotation from the database.
    async fn clear_rotation_acks(&self, target_rotation_id: i32) -> Result<(), RotationError> {
        use crate::db::schema::rotation_client_acks::dsl::*;
        let mut conn = self.context.db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        // BUG FIX: the parameter used to be named `rotation_id`, which shadowed
        // the `rotation_id` column from the dsl glob import, so the filter
        // compared the integer parameter with itself instead of building a
        // column predicate. Renaming the parameter restores the intended
        // `WHERE rotation_id = ?` behavior.
        diesel::delete(rotation_client_acks.filter(rotation_id.eq(target_rotation_id)))
            .execute(&mut conn)
            .await?;
        Ok(())
    }

    /// Record a rotation event in the audit trail.
    async fn log_rotation_event(
        &self,
        history_cert_id: i32,
        history_event_type: &str,
        history_details: Option<String>,
    ) -> Result<(), RotationError> {
        use crate::db::schema::tls_rotation_history::dsl::*;
        let mut conn = self.context.db.get().await.map_err(|e| {
            RotationError::InvalidState(format!("Failed to get DB connection: {}", e))
        })?;
        let new_history = NewTlsRotationHistory {
            cert_id: history_cert_id,
            event_type: history_event_type.to_string(),
            details: history_details,
        };
        diesel::insert_into(tls_rotation_history)
            .values(&new_history)
            .execute(&mut conn)
            .await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,161 @@
use std::time::{SystemTime, UNIX_EPOCH};
use miette::Diagnostic;
use secrecy::{ExposeSecret, SecretBox};
use thiserror::Error;
use x25519_dalek::{PublicKey, StaticSecret};
const EPHEMERAL_KEY_LIFETIME_SECS: u64 = 60;
// Errors raised during the unseal key-exchange flow.
#[derive(Error, Debug, Diagnostic)]
pub enum UnsealError {
    // The client-supplied public key was not exactly 32 bytes.
    #[error("Invalid public key")]
    #[diagnostic(code(arbiter_server::unseal::invalid_pubkey))]
    InvalidPublicKey,
}
/// Ephemeral X25519 keypair for secure password transmission
///
/// Generated on-demand when client requests unseal. Expires after 60 seconds.
/// Uses StaticSecret stored in SecretBox for automatic zeroization on drop.
pub struct EphemeralKeyPair {
    /// Secret key stored securely
    secret: SecretBox<StaticSecret>,
    /// Corresponding public key, sent to the client.
    public: PublicKey,
    /// Expiration time, Unix-epoch seconds.
    expires_at: u64,
}
impl EphemeralKeyPair {
    /// Generate a new ephemeral X25519 keypair, valid for 60 seconds.
    pub fn generate() -> Self {
        let seed: [u8; 32] = rand::random();
        let secret_key = StaticSecret::from(seed);
        let public_key = PublicKey::from(&secret_key);
        Self {
            secret: SecretBox::new(Box::new(secret_key)),
            public: public_key,
            expires_at: Self::unix_now() + EPHEMERAL_KEY_LIFETIME_SECS,
        }
    }

    // Current Unix time in whole seconds; panics if the clock is before the epoch.
    fn unix_now() -> u64 {
        SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("System time before UNIX epoch")
            .as_secs()
    }

    /// Whether this ephemeral key's lifetime has elapsed.
    pub fn is_expired(&self) -> bool {
        Self::unix_now() > self.expires_at
    }

    /// Expiration timestamp (Unix epoch seconds).
    pub fn expires_at(&self) -> u64 {
        self.expires_at
    }

    /// Public key bytes for transmission to the client.
    pub fn public_bytes(&self) -> Vec<u8> {
        self.public.as_bytes().to_vec()
    }

    /// Diffie-Hellman exchange with the client's 32-byte public key.
    ///
    /// Returns the 32-byte shared secret used for ChaCha20Poly1305 encryption;
    /// fails when `client_pubkey` is not exactly 32 bytes.
    pub fn perform_dh(&self, client_pubkey: &[u8]) -> Result<[u8; 32], UnsealError> {
        let raw: [u8; 32] =
            <[u8; 32]>::try_from(client_pubkey).map_err(|_| UnsealError::InvalidPublicKey)?;
        let their_public = PublicKey::from(raw);
        let shared = self.secret.expose_secret().diffie_hellman(&their_public);
        Ok(shared.to_bytes())
    }
}
// Unit tests for the ephemeral key exchange.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_ephemeral_keypair_generation() {
        let keypair = EphemeralKeyPair::generate();
        // Public key should be 32 bytes
        assert_eq!(keypair.public_bytes().len(), 32);
        // Should not be expired immediately
        assert!(!keypair.is_expired());
        // Expiration should be ~60 seconds in future
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let time_until_expiry = keypair.expires_at() - now;
        assert!((59..=61).contains(&time_until_expiry));
    }
    #[test]
    fn test_perform_dh_with_valid_key() {
        let server_keypair = EphemeralKeyPair::generate();
        let client_secret_bytes = rand::random::<[u8; 32]>();
        let client_secret = StaticSecret::from(client_secret_bytes);
        let client_public = PublicKey::from(&client_secret);
        // Server performs DH
        let server_shared_secret = server_keypair
            .perform_dh(client_public.as_bytes())
            .expect("DH should succeed");
        // Client performs DH
        let client_shared_secret = client_secret.diffie_hellman(&server_keypair.public);
        // Shared secrets should match (X25519 ECDH is symmetric)
        assert_eq!(server_shared_secret, client_shared_secret.to_bytes());
        assert_eq!(server_shared_secret.len(), 32);
    }
    #[test]
    fn test_perform_dh_with_invalid_key() {
        let keypair = EphemeralKeyPair::generate();
        // Try with invalid length
        let invalid_key = vec![1, 2, 3];
        let result = keypair.perform_dh(&invalid_key);
        assert!(result.is_err());
        // Try with wrong length (not 32 bytes)
        let invalid_key = vec![0u8; 16];
        let result = keypair.perform_dh(&invalid_key);
        assert!(result.is_err());
    }
    #[test]
    fn test_different_keypairs_produce_different_shared_secrets() {
        let server_keypair1 = EphemeralKeyPair::generate();
        let server_keypair2 = EphemeralKeyPair::generate();
        let client_secret_bytes = rand::random::<[u8; 32]>();
        let client_secret = StaticSecret::from(client_secret_bytes);
        let client_public = PublicKey::from(&client_secret);
        let shared1 = server_keypair1
            .perform_dh(client_public.as_bytes())
            .unwrap();
        let shared2 = server_keypair2
            .perform_dh(client_public.as_bytes())
            .unwrap();
        // Different server keys should produce different shared secrets
        assert_ne!(shared1, shared2);
    }
}

View File

@@ -0,0 +1,139 @@
use chacha20poly1305::{
aead::{Aead, KeyInit},
ChaCha20Poly1305, Key, Nonce,
};
use super::CryptoError;
/// Encrypt `plaintext` with AEAD (ChaCha20Poly1305).
///
/// On success returns the ciphertext with the 16-byte authentication tag
/// appended at the end.
pub fn encrypt(
    plaintext: &[u8],
    key: &[u8; 32],
    nonce: &[u8; 12],
) -> Result<Vec<u8>, CryptoError> {
    let cipher = ChaCha20Poly1305::new(Key::from_slice(key));
    cipher
        .encrypt(Nonce::from_slice(nonce), plaintext)
        .map_err(|err| CryptoError::AeadEncryption(err.to_string()))
}
/// Decrypt ciphertext with AEAD (ChaCha20Poly1305)
///
/// The ciphertext must include the authentication tag (its last 16 bytes);
/// decryption fails if the tag does not verify.
pub fn decrypt(
    ciphertext_with_tag: &[u8],
    key: &[u8; 32],
    nonce: &[u8; 12],
) -> Result<Vec<u8>, CryptoError> {
    let cipher_key = Key::from_slice(key);
    let cipher = ChaCha20Poly1305::new(cipher_key);
    let nonce_array = Nonce::from_slice(nonce);
    cipher
        .decrypt(nonce_array, ciphertext_with_tag)
        .map_err(|e| CryptoError::AeadDecryption(e.to_string()))
}
/// Build a 96-bit nonce from an i32 counter.
///
/// The counter occupies the last 4 bytes in big-endian order; the leading
/// 8 bytes stay zero.
pub fn nonce_from_counter(counter: i32) -> [u8; 12] {
    let mut out = [0u8; 12];
    for (dst, src) in out[8..].iter_mut().zip(counter.to_be_bytes()) {
        *dst = src;
    }
    out
}
// Unit tests for the AEAD helpers.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_aead_encrypt_decrypt_round_trip() {
        let plaintext = b"Hello, World! This is a secret message.";
        let key = [42u8; 32];
        let nonce = nonce_from_counter(1);
        // Encrypt
        let ciphertext = encrypt(plaintext, &key, &nonce).expect("Encryption failed");
        // Verify ciphertext is different from plaintext
        assert_ne!(ciphertext.as_slice(), plaintext);
        // Decrypt
        let decrypted = decrypt(&ciphertext, &key, &nonce).expect("Decryption failed");
        // Verify round-trip
        assert_eq!(decrypted.as_slice(), plaintext);
    }
    #[test]
    fn test_aead_decrypt_with_wrong_key() {
        let plaintext = b"Secret data";
        let key = [1u8; 32];
        let wrong_key = [2u8; 32];
        let nonce = nonce_from_counter(1);
        let ciphertext = encrypt(plaintext, &key, &nonce).expect("Encryption failed");
        // Attempt decrypt with wrong key
        let result = decrypt(&ciphertext, &wrong_key, &nonce);
        // Should fail (tag authentication rejects the wrong key)
        assert!(result.is_err());
    }
    #[test]
    fn test_aead_decrypt_with_wrong_nonce() {
        let plaintext = b"Secret data";
        let key = [1u8; 32];
        let nonce = nonce_from_counter(1);
        let wrong_nonce = nonce_from_counter(2);
        let ciphertext = encrypt(plaintext, &key, &nonce).expect("Encryption failed");
        // Attempt decrypt with wrong nonce
        let result = decrypt(&ciphertext, &key, &wrong_nonce);
        // Should fail
        assert!(result.is_err());
    }
    #[test]
    fn test_nonce_generation_from_counter() {
        let nonce1 = nonce_from_counter(1);
        let nonce2 = nonce_from_counter(2);
        let nonce_max = nonce_from_counter(i32::MAX);
        // Verify nonces are different
        assert_ne!(nonce1, nonce2);
        // Verify nonce format (first 8 bytes should be zero, last 4 contain counter)
        assert_eq!(&nonce1[0..8], &[0u8; 8]);
        assert_eq!(&nonce1[8..12], &1i32.to_be_bytes());
        assert_eq!(&nonce_max[8..12], &i32::MAX.to_be_bytes());
    }
    #[test]
    fn test_aead_tampered_ciphertext() {
        let plaintext = b"Important message";
        let key = [7u8; 32];
        let nonce = nonce_from_counter(5);
        let mut ciphertext = encrypt(plaintext, &key, &nonce).expect("Encryption failed");
        // Tamper with ciphertext (flip a bit)
        if let Some(byte) = ciphertext.get_mut(5) {
            *byte ^= 0x01;
        }
        // Attempt decrypt - should fail due to authentication tag mismatch
        let result = decrypt(&ciphertext, &key, &nonce);
        assert!(result.is_err());
    }
}

View File

@@ -0,0 +1,28 @@
pub mod aead;
pub mod root_key;
use miette::Diagnostic;
use thiserror::Error;
// Errors raised by the crypto helpers (AEAD and key derivation).
#[derive(Error, Debug, Diagnostic)]
pub enum CryptoError {
    // ChaCha20Poly1305 encryption failed.
    #[error("AEAD encryption failed: {0}")]
    #[diagnostic(code(arbiter_server::crypto::aead_encryption))]
    AeadEncryption(String),
    // ChaCha20Poly1305 decryption failed (includes tag-authentication failures).
    #[error("AEAD decryption failed: {0}")]
    #[diagnostic(code(arbiter_server::crypto::aead_decryption))]
    AeadDecryption(String),
    // Argon2 password-based key derivation failed.
    #[error("Key derivation failed: {0}")]
    #[diagnostic(code(arbiter_server::crypto::key_derivation))]
    KeyDerivation(String),
    // A nonce had the wrong size or form.
    #[error("Invalid nonce: {0}")]
    #[diagnostic(code(arbiter_server::crypto::invalid_nonce))]
    InvalidNonce(String),
    // A key (or salt) had the wrong size or encoding.
    #[error("Invalid key format: {0}")]
    #[diagnostic(code(arbiter_server::crypto::invalid_key))]
    InvalidKey(String),
}

View File

@@ -0,0 +1,240 @@
use argon2::{
password_hash::{rand_core::OsRng, PasswordHasher, SaltString},
Argon2, PasswordHash, PasswordVerifier,
};
use crate::db::models::AeadEncrypted;
use super::{aead, CryptoError};
/// Encrypt root key with user password
///
/// Uses Argon2id for password derivation and ChaCha20Poly1305 for encryption.
/// Returns the encrypted record plus the Argon2 salt string (also embedded in
/// the record's `argon2_salt` field) needed to decrypt later.
pub fn encrypt_root_key(
    root_key: &[u8; 32],
    password: &str,
    nonce_counter: i32,
) -> Result<(AeadEncrypted, String), CryptoError> {
    // Derive key from password using Argon2 (fresh random salt).
    let (derived_key, salt) = derive_key_from_password(password)?;
    // Generate nonce from counter
    let nonce = aead::nonce_from_counter(nonce_counter);
    // Encrypt root key; result is ciphertext with the 16-byte tag appended.
    let ciphertext_with_tag = aead::encrypt(root_key, &derived_key, &nonce)?;
    // Extract tag (last 16 bytes); checked_sub guards against a too-short buffer.
    let tag_start = ciphertext_with_tag
        .len()
        .checked_sub(16)
        .ok_or_else(|| CryptoError::AeadEncryption("Ciphertext too short".into()))?;
    let ciphertext = ciphertext_with_tag[..tag_start].to_vec();
    let tag = ciphertext_with_tag[tag_start..].to_vec();
    let aead_encrypted = AeadEncrypted {
        id: 1, // Will be set by database
        current_nonce: nonce_counter,
        ciphertext,
        tag,
        schema_version: 1, // Current version
        argon2_salt: Some(salt.clone()),
    };
    Ok((aead_encrypted, salt))
}
/// Decrypt root key with user password
///
/// Re-derives the AEAD key from the password and the stored salt, then
/// decrypts with ChaCha20Poly1305. A wrong password produces a different key
/// and fails AEAD tag authentication (there is no separate hash verification).
pub fn decrypt_root_key(
    encrypted: &AeadEncrypted,
    password: &str,
    salt: &str,
) -> Result<[u8; 32], CryptoError> {
    // Derive key from password using stored salt
    let derived_key = derive_key_with_salt(password, salt)?;
    // Generate nonce from counter
    let nonce = aead::nonce_from_counter(encrypted.current_nonce);
    // Reconstruct ciphertext with the tag appended (format aead::decrypt expects).
    let mut ciphertext_with_tag = encrypted.ciphertext.clone();
    ciphertext_with_tag.extend_from_slice(&encrypted.tag);
    // Decrypt
    let plaintext = aead::decrypt(&ciphertext_with_tag, &derived_key, &nonce)?;
    // Verify length before converting to the fixed-size root key.
    if plaintext.len() != 32 {
        return Err(CryptoError::InvalidKey(format!(
            "Expected 32 bytes, got {}",
            plaintext.len()
        )));
    }
    // Convert to fixed-size array
    let mut root_key = [0u8; 32];
    root_key.copy_from_slice(&plaintext);
    Ok(root_key)
}
/// Derive 32-byte key from password using Argon2id
///
/// Generates new random salt and returns (derived_key, salt_string)
fn derive_key_from_password(password: &str) -> Result<([u8; 32], String), CryptoError> {
    // Fresh random salt per derivation so identical passwords yield distinct keys.
    let salt = SaltString::generate(&mut OsRng).to_string();
    // Delegate to the salted path: previously this function duplicated the
    // whole hash/extract/length-check pipeline of `derive_key_with_salt`.
    // `SaltString::to_string` is the b64 form that `from_b64` parses, so the
    // round-trip is lossless and behavior is unchanged.
    let key = derive_key_with_salt(password, &salt)?;
    Ok((key, salt))
}
/// Derive 32-byte key from password using existing salt
fn derive_key_with_salt(password: &str, salt_str: &str) -> Result<[u8; 32], CryptoError> {
    let argon2 = Argon2::default();
    // Parse salt (b64 string, as produced by `SaltString::to_string`)
    let salt =
        SaltString::from_b64(salt_str).map_err(|e| CryptoError::InvalidKey(e.to_string()))?;
    let password_hash = argon2
        .hash_password(password.as_bytes(), &salt)
        .map_err(|e| CryptoError::KeyDerivation(e.to_string()))?;
    // Extract hash output; `Argon2::default()` emits 32 bytes, but verify defensively.
    let hash_output = password_hash
        .hash
        .ok_or_else(|| CryptoError::KeyDerivation("No hash output".into()))?;
    let hash_bytes = hash_output.as_bytes();
    if hash_bytes.len() != 32 {
        return Err(CryptoError::KeyDerivation(format!(
            "Expected 32 bytes, got {}",
            hash_bytes.len()
        )));
    }
    let mut key = [0u8; 32];
    key.copy_from_slice(hash_bytes);
    Ok(key)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Encrypt then decrypt must recover the original root key bit-for-bit.
    #[test]
    fn test_root_key_encrypt_decrypt_round_trip() {
        let key_material = [42u8; 32];
        let password = "super_secret_password_123";
        let counter = 1;
        let (encrypted, salt) =
            encrypt_root_key(&key_material, password, counter).expect("Encryption failed");
        // Sanity-check the persisted structure before decrypting.
        assert_eq!(encrypted.current_nonce, counter);
        assert_eq!(encrypted.schema_version, 1);
        assert_eq!(encrypted.tag.len(), 16); // ChaCha20Poly1305 tag size
        let recovered =
            decrypt_root_key(&encrypted, password, &salt).expect("Decryption failed");
        assert_eq!(recovered, key_material);
    }
    /// A wrong password derives a wrong key, so AEAD authentication must fail.
    #[test]
    fn test_decrypt_with_wrong_password() {
        let key_material = [99u8; 32];
        let good = "correct_password";
        let bad = "wrong_password";
        let (encrypted, salt) =
            encrypt_root_key(&key_material, good, 1).expect("Encryption failed");
        let outcome = decrypt_root_key(&encrypted, bad, &salt);
        assert!(outcome.is_err());
    }
    /// Each derivation draws a fresh random salt, so both salts and keys differ.
    #[test]
    fn test_password_derivation_different_salts() {
        let password = "same_password";
        let (key_a, salt_a) = derive_key_from_password(password).expect("Derivation 1 failed");
        let (key_b, salt_b) = derive_key_from_password(password).expect("Derivation 2 failed");
        assert_ne!(salt_a, salt_b);
        assert_ne!(key_a, key_b);
    }
    /// Re-deriving with a stored salt must reproduce the identical key.
    #[test]
    fn test_password_derivation_with_same_salt() {
        let password = "test_password";
        let (original_key, salt) = derive_key_from_password(password).expect("Derivation failed");
        let rederived = derive_key_with_salt(password, &salt).expect("Re-derivation failed");
        assert_eq!(original_key, rederived);
    }
    /// Distinct nonce counters (and freshly generated salts) must yield
    /// distinct ciphertexts, yet each must decrypt back to the same root key.
    #[test]
    fn test_different_nonce_produces_different_ciphertext() {
        let key_material = [77u8; 32];
        let password = "password123";
        let (first, salt_a) = encrypt_root_key(&key_material, password, 1).expect("Encryption 1 failed");
        let (second, salt_b) = encrypt_root_key(&key_material, password, 2).expect("Encryption 2 failed");
        assert_ne!(first.ciphertext, second.ciphertext);
        let recovered_a = decrypt_root_key(&first, password, &salt_a).expect("Decryption 1 failed");
        let recovered_b = decrypt_root_key(&second, password, &salt_b).expect("Decryption 2 failed");
        assert_eq!(recovered_a, key_material);
        assert_eq!(recovered_b, key_material);
    }
}

View File

@@ -1,11 +1,12 @@
use std::sync::Arc;
use diesel::{ use diesel::{
Connection as _, SqliteConnection, Connection as _, SqliteConnection,
connection::SimpleConnection as _, connection::{SimpleConnection as _, TransactionManager},
}; };
use diesel_async::{ use diesel_async::{
AsyncConnection, SimpleAsyncConnection, AsyncConnection, SimpleAsyncConnection,
pooled_connection::{AsyncDieselConnectionManager, ManagerConfig}, pooled_connection::{AsyncDieselConnectionManager, ManagerConfig, RecyclingMethod},
sync_connection_wrapper::SyncConnectionWrapper, sync_connection_wrapper::SyncConnectionWrapper,
}; };
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations}; use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
@@ -21,7 +22,7 @@ pub type DatabasePool = diesel_async::pooled_connection::bb8::Pool<DatabaseConne
pub type PoolInitError = diesel_async::pooled_connection::PoolError; pub type PoolInitError = diesel_async::pooled_connection::PoolError;
pub type PoolError = diesel_async::pooled_connection::bb8::RunError; pub type PoolError = diesel_async::pooled_connection::bb8::RunError;
static DB_FILE: &'static str = "arbiter.sqlite"; static DB_FILE: &str = "arbiter.sqlite";
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations"); const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

View File

@@ -1,58 +1,33 @@
#![allow(unused)] #![allow(unused)]
#![allow(clippy::all)] #![allow(clippy::all)]
use crate::db::schema::{self, aead_encrypted, arbiter_settings, root_key_history}; use crate::db::schema::{self, aead_encrypted, arbiter_settings};
use diesel::{prelude::*, sqlite::Sqlite}; use diesel::{prelude::*, sqlite::Sqlite};
use restructed::Models;
pub mod types { pub mod types {
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
pub struct SqliteTimestamp(DateTime<Utc>); pub struct SqliteTimestamp(DateTime<Utc>);
} }
#[derive(Models, Queryable, Debug, Insertable, Selectable)] #[derive(Queryable, Selectable, Debug, Insertable)]
#[view(
NewAeadEncrypted,
derive(Insertable),
omit(id),
attributes_with = "deriveless"
)]
#[diesel(table_name = aead_encrypted, check_for_backend(Sqlite))] #[diesel(table_name = aead_encrypted, check_for_backend(Sqlite))]
pub struct AeadEncrypted { pub struct AeadEncrypted {
pub id: i32, pub id: i32,
pub current_nonce: i32,
pub ciphertext: Vec<u8>, pub ciphertext: Vec<u8>,
pub tag: Vec<u8>, pub tag: Vec<u8>,
pub current_nonce: Vec<u8>,
pub schema_version: i32, pub schema_version: i32,
pub associated_root_key_id: i32, // references root_key_history.id pub argon2_salt: Option<String>,
pub created_at: i32,
}
#[derive(Models, Queryable, Debug, Insertable, Selectable)]
#[diesel(table_name = root_key_history, check_for_backend(Sqlite))]
#[view(
NewRootKeyHistory,
derive(Insertable),
omit(id),
attributes_with = "deriveless"
)]
pub struct RootKeyHistory {
pub id: i32,
pub ciphertext: Vec<u8>,
pub tag: Vec<u8>,
pub root_key_encryption_nonce: Vec<u8>,
pub data_encryption_nonce: Vec<u8>,
pub schema_version: i32,
pub salt: Vec<u8>,
} }
#[derive(Queryable, Debug, Insertable)] #[derive(Queryable, Debug, Insertable)]
#[diesel(table_name = arbiter_settings, check_for_backend(Sqlite))] #[diesel(table_name = arbiter_settings, check_for_backend(Sqlite))]
pub struct ArbiterSetting { pub struct ArbiterSetting {
pub id: i32, pub id: i32,
pub root_key_id: Option<i32>, // references root_key_history.id pub root_key_id: Option<i32>, // references aead_encrypted.id
pub cert_key: Vec<u8>, pub cert_key: Vec<u8>,
pub cert: Vec<u8>, pub cert: Vec<u8>,
pub current_cert_id: Option<i32>, // references tls_certificates.id
} }
#[derive(Queryable, Debug)] #[derive(Queryable, Debug)]
@@ -74,3 +49,70 @@ pub struct UseragentClient {
pub created_at: i32, pub created_at: i32,
pub updated_at: i32, pub updated_at: i32,
} }
// TLS Certificate Rotation Models
#[derive(Queryable, Debug, Insertable)]
#[diesel(table_name = schema::tls_certificates, check_for_backend(Sqlite))]
pub struct TlsCertificate {
pub id: i32,
pub cert: Vec<u8>,
pub cert_key: Vec<u8>,
pub not_before: i32,
pub not_after: i32,
pub created_at: i32,
pub is_active: bool,
}
#[derive(Insertable)]
#[diesel(table_name = schema::tls_certificates)]
pub struct NewTlsCertificate {
pub cert: Vec<u8>,
pub cert_key: Vec<u8>,
pub not_before: i32,
pub not_after: i32,
pub is_active: bool,
}
#[derive(Queryable, Debug, Insertable)]
#[diesel(table_name = schema::tls_rotation_state, check_for_backend(Sqlite))]
pub struct TlsRotationState {
pub id: i32,
pub state: String,
pub new_cert_id: Option<i32>,
pub initiated_at: Option<i32>,
pub timeout_at: Option<i32>,
}
#[derive(Queryable, Debug, Insertable)]
#[diesel(table_name = schema::rotation_client_acks, check_for_backend(Sqlite))]
pub struct RotationClientAck {
pub rotation_id: i32,
pub client_key: String,
pub ack_received_at: i32,
}
#[derive(Insertable)]
#[diesel(table_name = schema::rotation_client_acks)]
pub struct NewRotationClientAck {
pub rotation_id: i32,
pub client_key: String,
}
#[derive(Queryable, Debug, Insertable)]
#[diesel(table_name = schema::tls_rotation_history, check_for_backend(Sqlite))]
pub struct TlsRotationHistory {
pub id: i32,
pub cert_id: i32,
pub event_type: String,
pub timestamp: i32,
pub details: Option<String>,
}
#[derive(Insertable)]
#[diesel(table_name = schema::tls_rotation_history)]
pub struct NewTlsRotationHistory {
pub cert_id: i32,
pub event_type: String,
pub details: Option<String>,
}

View File

@@ -3,12 +3,11 @@
diesel::table! { diesel::table! {
aead_encrypted (id) { aead_encrypted (id) {
id -> Integer, id -> Integer,
current_nonce -> Binary, current_nonce -> Integer,
ciphertext -> Binary, ciphertext -> Binary,
tag -> Binary, tag -> Binary,
schema_version -> Integer, schema_version -> Integer,
associated_root_key_id -> Integer, argon2_salt -> Nullable<Text>,
created_at -> Integer,
} }
} }
@@ -18,6 +17,7 @@ diesel::table! {
root_key_id -> Nullable<Integer>, root_key_id -> Nullable<Integer>,
cert_key -> Binary, cert_key -> Binary,
cert -> Binary, cert -> Binary,
current_cert_id -> Nullable<Integer>,
} }
} }
@@ -31,18 +31,6 @@ diesel::table! {
} }
} }
diesel::table! {
root_key_history (id) {
id -> Integer,
root_key_encryption_nonce -> Binary,
data_encryption_nonce -> Binary,
ciphertext -> Binary,
tag -> Binary,
schema_version -> Integer,
salt -> Binary,
}
}
diesel::table! { diesel::table! {
useragent_client (id) { useragent_client (id) {
id -> Integer, id -> Integer,
@@ -53,13 +41,59 @@ diesel::table! {
} }
} }
diesel::joinable!(aead_encrypted -> root_key_history (associated_root_key_id)); diesel::table! {
diesel::joinable!(arbiter_settings -> root_key_history (root_key_id)); tls_certificates (id) {
id -> Integer,
cert -> Binary,
cert_key -> Binary,
not_before -> Integer,
not_after -> Integer,
created_at -> Integer,
is_active -> Bool,
}
}
diesel::table! {
tls_rotation_state (id) {
id -> Integer,
state -> Text,
new_cert_id -> Nullable<Integer>,
initiated_at -> Nullable<Integer>,
timeout_at -> Nullable<Integer>,
}
}
diesel::table! {
rotation_client_acks (rotation_id, client_key) {
rotation_id -> Integer,
client_key -> Text,
ack_received_at -> Integer,
}
}
diesel::table! {
tls_rotation_history (id) {
id -> Integer,
cert_id -> Integer,
event_type -> Text,
timestamp -> Integer,
details -> Nullable<Text>,
}
}
diesel::joinable!(arbiter_settings -> aead_encrypted (root_key_id));
diesel::joinable!(arbiter_settings -> tls_certificates (current_cert_id));
diesel::joinable!(tls_rotation_state -> tls_certificates (new_cert_id));
diesel::joinable!(rotation_client_acks -> tls_certificates (rotation_id));
diesel::joinable!(tls_rotation_history -> tls_certificates (cert_id));
diesel::allow_tables_to_appear_in_same_query!( diesel::allow_tables_to_appear_in_same_query!(
aead_encrypted, aead_encrypted,
arbiter_settings, arbiter_settings,
program_client, program_client,
root_key_history,
useragent_client, useragent_client,
tls_certificates,
tls_rotation_state,
rotation_client_acks,
tls_rotation_history,
); );

View File

@@ -1,4 +1,7 @@
#![forbid(unsafe_code)] #![allow(unused)]
use std::sync::Arc;
use arbiter_proto::{ use arbiter_proto::{
proto::{ClientRequest, ClientResponse, UserAgentRequest, UserAgentResponse}, proto::{ClientRequest, ClientResponse, UserAgentRequest, UserAgentResponse},
transport::BiStream, transport::BiStream,
@@ -15,8 +18,9 @@ use crate::{
}; };
pub mod actors; pub mod actors;
pub mod context; mod context;
pub mod db; mod crypto;
mod db;
mod errors; mod errors;
const DEFAULT_CHANNEL_SIZE: usize = 1000; const DEFAULT_CHANNEL_SIZE: usize = 1000;
@@ -25,12 +29,6 @@ pub struct Server {
context: ServerContext, context: ServerContext,
} }
impl Server {
pub fn new(context: ServerContext) -> Self {
Self { context }
}
}
#[async_trait] #[async_trait]
impl arbiter_proto::proto::arbiter_service_server::ArbiterService for Server { impl arbiter_proto::proto::arbiter_service_server::ArbiterService for Server {
type UserAgentStream = ReceiverStream<Result<UserAgentResponse, Status>>; type UserAgentStream = ReceiverStream<Result<UserAgentResponse, Status>>;

View File

@@ -1,34 +0,0 @@
use arbiter_proto::proto::arbiter_service_server::ArbiterServiceServer;
use arbiter_server::{Server, context::ServerContext, db};
use tracing::info;
#[tokio::main]
async fn main() -> miette::Result<()> {
tracing_subscriber::fmt()
.with_env_filter(
tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info")),
)
.init();
info!("Starting arbiter server");
info!("Initializing database");
let db = db::create_pool(None).await?;
info!("Database ready");
info!("Initializing server context");
let context = ServerContext::new(db).await?;
info!("Server context ready");
let addr = "[::1]:50051".parse().expect("valid address");
info!(%addr, "Starting gRPC server");
tonic::transport::Server::builder()
.add_service(ArbiterServiceServer::new(Server::new(context)))
.serve(addr)
.await
.map_err(|e| miette::miette!("gRPC server error: {e}"))?;
unreachable!("gRPC server should run indefinitely");
}

View File

@@ -1,11 +1,6 @@
# cargo-vet audits file # cargo-vet audits file
[[audits.similar]]
who = "hdbg <httpdebugger@protonmail.com>"
criteria = "safe-to-deploy"
version = "2.2.1"
[[audits.test-log]] [[audits.test-log]]
who = "hdbg <httpdebugger@protonmail.com>" who = "hdbg <httpdebugger@protonmail.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -16,12 +11,6 @@ who = "hdbg <httpdebugger@protonmail.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
delta = "0.2.18 -> 0.2.19" delta = "0.2.18 -> 0.2.19"
[[trusted.cc]]
criteria = "safe-to-deploy"
user-id = 55123 # rust-lang-owner
start = "2022-10-29"
end = "2027-02-16"
[[trusted.h2]] [[trusted.h2]]
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
user-id = 359 # Sean McArthur (seanmonstar) user-id = 359 # Sean McArthur (seanmonstar)
@@ -40,12 +29,6 @@ user-id = 359 # Sean McArthur (seanmonstar)
start = "2022-01-15" start = "2022-01-15"
end = "2027-02-14" end = "2027-02-14"
[[trusted.libc]]
criteria = "safe-to-deploy"
user-id = 55123 # rust-lang-owner
start = "2024-08-15"
end = "2027-02-16"
[[trusted.rustix]] [[trusted.rustix]]
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
user-id = 6825 # Dan Gohman (sunfishcode) user-id = 6825 # Dan Gohman (sunfishcode)
@@ -63,33 +46,3 @@ criteria = "safe-to-deploy"
user-id = 3618 # David Tolnay (dtolnay) user-id = 3618 # David Tolnay (dtolnay)
start = "2019-03-01" start = "2019-03-01"
end = "2027-02-14" end = "2027-02-14"
[[trusted.thread_local]]
criteria = "safe-to-deploy"
user-id = 2915 # Amanieu d'Antras (Amanieu)
start = "2019-09-07"
end = "2027-02-16"
[[trusted.toml]]
criteria = "safe-to-deploy"
user-id = 6743 # Ed Page (epage)
start = "2022-12-14"
end = "2027-02-16"
[[trusted.toml_parser]]
criteria = "safe-to-deploy"
user-id = 6743 # Ed Page (epage)
start = "2025-07-08"
end = "2027-02-16"
[[trusted.tonic-build]]
criteria = "safe-to-deploy"
user-id = 10
start = "2019-09-10"
end = "2027-02-16"
[[trusted.windows-sys]]
criteria = "safe-to-deploy"
user-id = 64539 # Kenny Kerr (kennykerr)
start = "2021-11-15"
end = "2027-02-16"

View File

@@ -13,9 +13,6 @@ url = "https://raw.githubusercontent.com/google/supply-chain/main/audits.toml"
[imports.mozilla] [imports.mozilla]
url = "https://raw.githubusercontent.com/mozilla/supply-chain/main/audits.toml" url = "https://raw.githubusercontent.com/mozilla/supply-chain/main/audits.toml"
[imports.zcash]
url = "https://raw.githubusercontent.com/zcash/rust-ecosystem/main/supply-chain/audits.toml"
[[exemptions.addr2line]] [[exemptions.addr2line]]
version = "0.25.1" version = "0.25.1"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -44,6 +41,10 @@ criteria = "safe-to-deploy"
version = "0.1.89" version = "0.1.89"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.autocfg]]
version = "1.5.0"
criteria = "safe-to-deploy"
[[exemptions.aws-lc-rs]] [[exemptions.aws-lc-rs]]
version = "1.15.4" version = "1.15.4"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -192,6 +193,10 @@ criteria = "safe-to-deploy"
version = "0.2.0" version = "0.2.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.dunce]]
version = "1.0.5"
criteria = "safe-to-deploy"
[[exemptions.dyn-clone]] [[exemptions.dyn-clone]]
version = "1.0.20" version = "1.0.20"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -204,6 +209,10 @@ criteria = "safe-to-deploy"
version = "3.0.0-pre.6" version = "3.0.0-pre.6"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.errno]]
version = "0.3.14"
criteria = "safe-to-deploy"
[[exemptions.fiat-crypto]] [[exemptions.fiat-crypto]]
version = "0.3.0" version = "0.3.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -252,6 +261,10 @@ criteria = "safe-to-deploy"
version = "1.4.0" version = "1.4.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.http-body]]
version = "1.0.1"
criteria = "safe-to-deploy"
[[exemptions.http-body-util]] [[exemptions.http-body-util]]
version = "0.1.3" version = "0.1.3"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -316,6 +329,10 @@ criteria = "safe-to-deploy"
version = "0.19.0" version = "0.19.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.libc]]
version = "0.2.181"
criteria = "safe-to-deploy"
[[exemptions.libsqlite3-sys]] [[exemptions.libsqlite3-sys]]
version = "0.35.0" version = "0.35.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -508,6 +525,10 @@ criteria = "safe-to-deploy"
version = "0.1.27" version = "0.1.27"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.rustc_version]]
version = "0.4.1"
criteria = "safe-to-deploy"
[[exemptions.rusticata-macros]] [[exemptions.rusticata-macros]]
version = "4.1.0" version = "4.1.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -524,6 +545,10 @@ criteria = "safe-to-deploy"
version = "0.103.9" version = "0.103.9"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.rustversion]]
version = "1.0.22"
criteria = "safe-to-deploy"
[[exemptions.scoped-futures]] [[exemptions.scoped-futures]]
version = "0.1.4" version = "0.1.4"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -628,6 +653,10 @@ criteria = "safe-to-deploy"
version = "2.0.18" version = "2.0.18"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.thread_local]]
version = "1.1.9"
criteria = "safe-to-run"
[[exemptions.time]] [[exemptions.time]]
version = "0.3.47" version = "0.3.47"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -660,6 +689,14 @@ criteria = "safe-to-deploy"
version = "0.7.18" version = "0.7.18"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.toml]]
version = "0.9.11+spec-1.1.0"
criteria = "safe-to-deploy"
[[exemptions.toml_parser]]
version = "1.0.6+spec-1.1.0"
criteria = "safe-to-deploy"
[[exemptions.tonic]] [[exemptions.tonic]]
version = "0.14.3" version = "0.14.3"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -704,6 +741,10 @@ criteria = "safe-to-deploy"
version = "0.3.22" version = "0.3.22"
criteria = "safe-to-run" criteria = "safe-to-run"
[[exemptions.try-lock]]
version = "0.2.5"
criteria = "safe-to-deploy"
[[exemptions.typenum]] [[exemptions.typenum]]
version = "1.19.0" version = "1.19.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -728,6 +769,10 @@ criteria = "safe-to-deploy"
version = "1.20.0" version = "1.20.0"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.want]]
version = "0.3.1"
criteria = "safe-to-deploy"
[[exemptions.wasi]] [[exemptions.wasi]]
version = "0.11.1+wasi-snapshot-preview1" version = "0.11.1+wasi-snapshot-preview1"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -772,6 +817,10 @@ criteria = "safe-to-deploy"
version = "0.59.3" version = "0.59.3"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.windows-link]]
version = "0.2.1"
criteria = "safe-to-deploy"
[[exemptions.windows-result]] [[exemptions.windows-result]]
version = "0.4.1" version = "0.4.1"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -780,6 +829,18 @@ criteria = "safe-to-deploy"
version = "0.5.1" version = "0.5.1"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.windows-sys]]
version = "0.52.0"
criteria = "safe-to-deploy"
[[exemptions.windows-sys]]
version = "0.60.2"
criteria = "safe-to-deploy"
[[exemptions.windows-sys]]
version = "0.61.2"
criteria = "safe-to-deploy"
[[exemptions.windows-targets]] [[exemptions.windows-targets]]
version = "0.52.6" version = "0.52.6"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -864,6 +925,10 @@ criteria = "safe-to-deploy"
version = "0.5.2" version = "0.5.2"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
[[exemptions.zeroize]]
version = "1.8.2"
criteria = "safe-to-deploy"
[[exemptions.zmij]] [[exemptions.zmij]]
version = "1.0.20" version = "1.0.20"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"

View File

@@ -41,12 +41,6 @@ user-id = 359
user-login = "seanmonstar" user-login = "seanmonstar"
user-name = "Sean McArthur" user-name = "Sean McArthur"
[[publisher.libc]]
version = "0.2.182"
when = "2026-02-13"
user-id = 55123
user-login = "rust-lang-owner"
[[publisher.rustix]] [[publisher.rustix]]
version = "1.1.3" version = "1.1.3"
when = "2025-12-23" when = "2025-12-23"
@@ -69,33 +63,12 @@ user-login = "dtolnay"
user-name = "David Tolnay" user-name = "David Tolnay"
[[publisher.syn]] [[publisher.syn]]
version = "2.0.115" version = "2.0.114"
when = "2026-02-12" when = "2026-01-07"
user-id = 3618 user-id = 3618
user-login = "dtolnay" user-login = "dtolnay"
user-name = "David Tolnay" user-name = "David Tolnay"
[[publisher.thread_local]]
version = "1.1.9"
when = "2025-06-12"
user-id = 2915
user-login = "Amanieu"
user-name = "Amanieu d'Antras"
[[publisher.toml]]
version = "0.9.12+spec-1.1.0"
when = "2026-02-10"
user-id = 6743
user-login = "epage"
user-name = "Ed Page"
[[publisher.toml_parser]]
version = "1.0.8+spec-1.1.0"
when = "2026-02-12"
user-id = 6743
user-login = "epage"
user-name = "Ed Page"
[[publisher.unicode-width]] [[publisher.unicode-width]]
version = "0.1.14" version = "0.1.14"
when = "2024-09-19" when = "2024-09-19"
@@ -147,34 +120,6 @@ version = "0.244.0"
when = "2026-01-06" when = "2026-01-06"
trusted-publisher = "github:bytecodealliance/wasm-tools" trusted-publisher = "github:bytecodealliance/wasm-tools"
[[publisher.windows-sys]]
version = "0.52.0"
when = "2023-11-15"
user-id = 64539
user-login = "kennykerr"
user-name = "Kenny Kerr"
[[publisher.windows-sys]]
version = "0.59.0"
when = "2024-07-30"
user-id = 64539
user-login = "kennykerr"
user-name = "Kenny Kerr"
[[publisher.windows-sys]]
version = "0.60.2"
when = "2025-06-12"
user-id = 64539
user-login = "kennykerr"
user-name = "Kenny Kerr"
[[publisher.windows-sys]]
version = "0.61.2"
when = "2025-10-06"
user-id = 64539
user-login = "kennykerr"
user-name = "Kenny Kerr"
[[publisher.wit-bindgen]] [[publisher.wit-bindgen]]
version = "0.51.0" version = "0.51.0"
when = "2026-01-12" when = "2026-01-12"
@@ -320,12 +265,6 @@ criteria = "safe-to-deploy"
version = "1.1.2" version = "1.1.2"
notes = "Contains `unsafe` code but it's well-documented and scoped to what it's intended to be doing. Otherwise a well-focused and straightforward crate." notes = "Contains `unsafe` code but it's well-documented and scoped to what it's intended to be doing. Otherwise a well-focused and straightforward crate."
[[audits.bytecode-alliance.audits.cipher]]
who = "Andrew Brown <andrew.brown@intel.com>"
criteria = "safe-to-deploy"
version = "0.4.4"
notes = "Most unsafe is hidden by `inout` dependency; only remaining unsafe is raw-splitting a slice and an unreachable hint. Older versions of this regularly reach ~150k daily downloads."
[[audits.bytecode-alliance.audits.core-foundation-sys]] [[audits.bytecode-alliance.audits.core-foundation-sys]]
who = "Dan Gohman <dev@sunfishcode.online>" who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -340,23 +279,6 @@ who = "Nick Fitzgerald <fitzgen@gmail.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
delta = "0.2.4 -> 0.2.5" delta = "0.2.4 -> 0.2.5"
[[audits.bytecode-alliance.audits.errno]]
who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy"
version = "0.3.0"
notes = "This crate uses libc and windows-sys APIs to get and set the raw OS error value."
[[audits.bytecode-alliance.audits.errno]]
who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy"
delta = "0.3.0 -> 0.3.1"
notes = "Just a dependency version bump and a bug fix for redox"
[[audits.bytecode-alliance.audits.errno]]
who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy"
delta = "0.3.9 -> 0.3.10"
[[audits.bytecode-alliance.audits.fastrand]] [[audits.bytecode-alliance.audits.fastrand]]
who = "Alex Crichton <alex@alexcrichton.com>" who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -463,28 +385,11 @@ criteria = "safe-to-deploy"
delta = "0.4.1 -> 0.5.0" delta = "0.4.1 -> 0.5.0"
notes = "Minor changes for a `no_std` upgrade but otherwise everything looks as expected." notes = "Minor changes for a `no_std` upgrade but otherwise everything looks as expected."
[[audits.bytecode-alliance.audits.http-body]]
who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"
version = "1.0.0-rc.2"
[[audits.bytecode-alliance.audits.http-body]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
delta = "1.0.0-rc.2 -> 1.0.0"
notes = "Only minor changes made for a stable release."
[[audits.bytecode-alliance.audits.iana-time-zone-haiku]] [[audits.bytecode-alliance.audits.iana-time-zone-haiku]]
who = "Dan Gohman <dev@sunfishcode.online>" who = "Dan Gohman <dev@sunfishcode.online>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
version = "0.1.2" version = "0.1.2"
[[audits.bytecode-alliance.audits.inout]]
who = "Andrew Brown <andrew.brown@intel.com>"
criteria = "safe-to-deploy"
version = "0.1.3"
notes = "A part of RustCrypto/utils, this crate is designed to handle unsafe buffers and carefully documents the safety concerns throughout. Older versions of this tally up to ~130k daily downloads."
[[audits.bytecode-alliance.audits.leb128fmt]] [[audits.bytecode-alliance.audits.leb128fmt]]
who = "Alex Crichton <alex@alexcrichton.com>" who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -538,24 +443,6 @@ criteria = "safe-to-deploy"
delta = "0.8.5 -> 0.8.9" delta = "0.8.5 -> 0.8.9"
notes = "No new unsafe code, just refactorings." notes = "No new unsafe code, just refactorings."
[[audits.bytecode-alliance.audits.nu-ansi-term]]
who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"
version = "0.46.0"
notes = "one use of unsafe to call windows specific api to get console handle."
[[audits.bytecode-alliance.audits.nu-ansi-term]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
delta = "0.46.0 -> 0.50.1"
notes = "Lots of stylistic/rust-related chanegs, plus new features, but nothing out of the ordrinary."
[[audits.bytecode-alliance.audits.nu-ansi-term]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
delta = "0.50.1 -> 0.50.3"
notes = "CI changes, Rust changes, nothing out of the ordinary."
[[audits.bytecode-alliance.audits.num-traits]] [[audits.bytecode-alliance.audits.num-traits]]
who = "Andrew Brown <andrew.brown@intel.com>" who = "Andrew Brown <andrew.brown@intel.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -650,38 +537,12 @@ criteria = "safe-to-run"
delta = "0.2.16 -> 0.2.18" delta = "0.2.16 -> 0.2.18"
notes = "Standard macro changes, nothing out of place" notes = "Standard macro changes, nothing out of place"
[[audits.bytecode-alliance.audits.tracing-log]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
version = "0.1.3"
notes = """
This is a standard adapter between the `log` ecosystem and the `tracing`
ecosystem. There's one `unsafe` block in this crate and it's well-scoped.
"""
[[audits.bytecode-alliance.audits.tracing-log]]
who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy"
delta = "0.1.3 -> 0.2.0"
notes = "Nothing out of the ordinary, a typical major version update and nothing awry."
[[audits.bytecode-alliance.audits.try-lock]]
who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"
version = "0.2.4"
notes = "Implements a concurrency primitive with atomics, and is not obviously incorrect"
[[audits.bytecode-alliance.audits.vcpkg]] [[audits.bytecode-alliance.audits.vcpkg]]
who = "Pat Hickey <phickey@fastly.com>" who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
version = "0.2.15" version = "0.2.15"
notes = "no build.rs, no macros, no unsafe. It reads the filesystem and makes copies of DLLs into OUT_DIR." notes = "no build.rs, no macros, no unsafe. It reads the filesystem and makes copies of DLLs into OUT_DIR."
[[audits.bytecode-alliance.audits.want]]
who = "Pat Hickey <phickey@fastly.com>"
criteria = "safe-to-deploy"
version = "0.3.0"
[[audits.bytecode-alliance.audits.wasm-metadata]] [[audits.bytecode-alliance.audits.wasm-metadata]]
who = "Alex Crichton <alex@alexcrichton.com>" who = "Alex Crichton <alex@alexcrichton.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -730,13 +591,6 @@ criteria = "safe-to-deploy"
delta = "0.243.0 -> 0.244.0" delta = "0.243.0 -> 0.244.0"
notes = "The Bytecode Alliance is the author of this crate" notes = "The Bytecode Alliance is the author of this crate"
[[audits.google.audits.autocfg]]
who = "Manish Goregaokar <manishearth@google.com>"
criteria = "safe-to-deploy"
version = "1.4.0"
notes = "Contains no unsafe"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.base64]] [[audits.google.audits.base64]]
who = "amarjotgill <amarjotgill@google.com>" who = "amarjotgill <amarjotgill@google.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -865,89 +719,6 @@ delta = "0.2.9 -> 0.2.13"
notes = "Audited at https://fxrev.dev/946396" notes = "Audited at https://fxrev.dev/946396"
aggregated-from = "https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/third_party/rust_crates/supply-chain/audits.toml?format=TEXT" aggregated-from = "https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/third_party/rust_crates/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.proc-macro-error-attr]]
who = "George Burgess IV <gbiv@google.com>"
criteria = "safe-to-deploy"
version = "1.0.4"
aggregated-from = "https://chromium.googlesource.com/chromiumos/third_party/rust_crates/+/refs/heads/main/cargo-vet/audits.toml?format=TEXT"
[[audits.google.audits.rand_core]]
who = "Lukasz Anforowicz <lukasza@chromium.org>"
criteria = "safe-to-deploy"
version = "0.6.4"
notes = """
For more detailed unsafe review notes please see https://crrev.com/c/6362797
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Lukasz Anforowicz <lukasza@chromium.org>"
criteria = "safe-to-deploy"
version = "1.0.14"
notes = """
Grepped for `-i cipher`, `-i crypto`, `'\bfs\b'``, `'\bnet\b'``, `'\bunsafe\b'``
and there were no hits except for:
* Using trivially-safe `unsafe` in test code:
```
tests/test_const.rs:unsafe fn _unsafe() {}
tests/test_const.rs:const _UNSAFE: () = unsafe { _unsafe() };
```
* Using `unsafe` in a string:
```
src/constfn.rs: "unsafe" => Qualifiers::Unsafe,
```
* Using `std::fs` in `build/build.rs` to write `${OUT_DIR}/version.expr`
which is later read back via `include!` used in `src/lib.rs`.
Version `1.0.6` of this crate has been added to Chromium in
https://source.chromium.org/chromium/chromium/src/+/28841c33c77833cc30b286f9ae24c97e7a8f4057
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Adrian Taylor <adetaylor@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.14 -> 1.0.15"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "danakj <danakj@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.15 -> 1.0.16"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Dustin J. Mitchell <djmitche@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.16 -> 1.0.17"
notes = "Just updates windows compat"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Liza Burakova <liza@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.17 -> 1.0.18"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Dustin J. Mitchell <djmitche@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.18 -> 1.0.19"
notes = "No unsafe, just doc changes"
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.rustversion]]
who = "Daniel Cheng <dcheng@chromium.org>"
criteria = "safe-to-deploy"
delta = "1.0.19 -> 1.0.20"
notes = "Only minor updates to documentation and the mock today used for testing."
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.smallvec]] [[audits.google.audits.smallvec]]
who = "Manish Goregaokar <manishearth@google.com>" who = "Manish Goregaokar <manishearth@google.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -965,28 +736,6 @@ Previously reviewed during security review and the audit is grandparented in.
""" """
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.strum]]
who = "danakj@chromium.org"
criteria = "safe-to-deploy"
version = "0.25.0"
notes = """
Reviewed in https://crrev.com/c/5171063
Previously reviewed during security review and the audit is grandparented in.
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.google.audits.strum_macros]]
who = "danakj@chromium.org"
criteria = "safe-to-deploy"
version = "0.25.3"
notes = """
Reviewed in https://crrev.com/c/5171063
Previously reviewed during security review and the audit is grandparented in.
"""
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
[[audits.mozilla.wildcard-audits.core-foundation-sys]] [[audits.mozilla.wildcard-audits.core-foundation-sys]]
who = "Bobby Holley <bobbyholley@gmail.com>" who = "Bobby Holley <bobbyholley@gmail.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -1063,12 +812,6 @@ criteria = "safe-to-deploy"
delta = "0.2.3 -> 0.2.4" delta = "0.2.3 -> 0.2.4"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml" aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.errno]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "0.3.1 -> 0.3.3"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.fastrand]] [[audits.mozilla.audits.fastrand]]
who = "Mike Hommey <mh+mozilla@glandium.org>" who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -1186,16 +929,6 @@ yet, but it's all valid. Otherwise it's a pretty simple crate.
""" """
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml" aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.rustc_version]]
who = "Nika Layzell <nika@thelayzells.com>"
criteria = "safe-to-deploy"
version = "0.4.0"
notes = """
Use of powerful capabilities is limited to invoking `rustc -vV` to get version
information for parsing version information.
"""
aggregated-from = "https://raw.githubusercontent.com/mozilla/cargo-vet/main/supply-chain/audits.toml"
[[audits.mozilla.audits.serde_spanned]] [[audits.mozilla.audits.serde_spanned]]
who = "Ben Dean-Kawamura <bdk@mozilla.com>" who = "Ben Dean-Kawamura <bdk@mozilla.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -1222,12 +955,6 @@ criteria = "safe-to-deploy"
delta = "1.1.0 -> 1.3.0" delta = "1.1.0 -> 1.3.0"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml" aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.similar]]
who = "Nika Layzell <nika@thelayzells.com>"
criteria = "safe-to-deploy"
delta = "2.2.1 -> 2.7.0"
aggregated-from = "https://raw.githubusercontent.com/mozilla/cargo-vet/main/supply-chain/audits.toml"
[[audits.mozilla.audits.smallvec]] [[audits.mozilla.audits.smallvec]]
who = "Erich Gubler <erichdongubler@gmail.com>" who = "Erich Gubler <erichdongubler@gmail.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -1240,30 +967,6 @@ criteria = "safe-to-deploy"
delta = "0.10.0 -> 0.11.1" delta = "0.10.0 -> 0.11.1"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml" aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.strum]]
who = "Teodor Tanasoaia <ttanasoaia@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.25.0 -> 0.26.3"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.strum]]
who = "Erich Gubler <erichdongubler@gmail.com>"
criteria = "safe-to-deploy"
delta = "0.26.3 -> 0.27.1"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.strum_macros]]
who = "Teodor Tanasoaia <ttanasoaia@mozilla.com>"
criteria = "safe-to-deploy"
delta = "0.25.3 -> 0.26.4"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.strum_macros]]
who = "Erich Gubler <erichdongubler@gmail.com>"
criteria = "safe-to-deploy"
delta = "0.26.4 -> 0.27.1"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.synstructure]] [[audits.mozilla.audits.synstructure]]
who = "Nika Layzell <nika@thelayzells.com>" who = "Nika Layzell <nika@thelayzells.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
@@ -1335,153 +1038,3 @@ who = "Jan-Erik Rediger <jrediger@mozilla.com>"
criteria = "safe-to-deploy" criteria = "safe-to-deploy"
version = "0.1.5" version = "0.1.5"
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml" aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
[[audits.mozilla.audits.windows-link]]
who = "Mark Hammond <mhammond@skippinet.com.au>"
criteria = "safe-to-deploy"
version = "0.1.1"
notes = "A microsoft crate allowing unsafe calls to windows apis."
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.windows-link]]
who = "Erich Gubler <erichdongubler@gmail.com>"
criteria = "safe-to-deploy"
delta = "0.1.1 -> 0.2.0"
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.mozilla.audits.zeroize]]
who = "Benjamin Beurdouche <beurdouche@mozilla.com>"
criteria = "safe-to-deploy"
version = "1.8.1"
notes = """
This code DOES contain unsafe code required to internally call volatiles
for deleting data. This is expected and documented behavior.
"""
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
[[audits.zcash.audits.autocfg]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "1.4.0 -> 1.5.0"
notes = "Filesystem change is to remove the generated LLVM IR output file after probing."
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.dunce]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
version = "1.0.5"
notes = """
Does what it says on the tin. No `unsafe`, and the only IO is `std::fs::canonicalize`.
Path and string handling looks plausibly correct.
"""
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.errno]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.3.3 -> 0.3.8"
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.errno]]
who = "Daira-Emma Hopwood <daira@jacaranda.org>"
criteria = "safe-to-deploy"
delta = "0.3.8 -> 0.3.9"
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.errno]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.3.10 -> 0.3.11"
notes = "The `__errno` location for vxworks and cygwin looks correct from a quick search."
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
[[audits.zcash.audits.errno]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.3.11 -> 0.3.13"
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.errno]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.3.13 -> 0.3.14"
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.http-body]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "1.0.0 -> 1.0.1"
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.inout]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.1.3 -> 0.1.4"
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
[[audits.zcash.audits.rustc_version]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.4.0 -> 0.4.1"
notes = "Changes to `Command` usage are to add support for `RUSTC_WRAPPER`."
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.rustversion]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "1.0.20 -> 1.0.21"
notes = "Build script change is to fix building with `-Zfmt-debug=none`."
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.rustversion]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "1.0.21 -> 1.0.22"
notes = "Changes to generated code are to prepend a clippy annotation."
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
[[audits.zcash.audits.strum]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.27.1 -> 0.27.2"
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.strum_macros]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.27.1 -> 0.27.2"
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.try-lock]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.2.4 -> 0.2.5"
notes = "Bumps MSRV to remove unsafe code block."
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.want]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.3.0 -> 0.3.1"
notes = """
Migrates to `try-lock 0.2.4` to replace some unsafe APIs that were not marked
`unsafe` (but that were being used safely).
"""
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
[[audits.zcash.audits.windows-link]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "0.2.0 -> 0.2.1"
notes = "No code changes at all."
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
[[audits.zcash.audits.zeroize]]
who = "Jack Grigg <jack@electriccoin.co>"
criteria = "safe-to-deploy"
delta = "1.8.1 -> 1.8.2"
notes = """
Changes to `unsafe` code are to alter how `core::mem::size_of` is named; no actual changes
to the `unsafe` logic.
"""
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"

View File

@@ -0,0 +1,31 @@
Extension Discovery Cache
=========================
This folder is used by `package:extension_discovery` to cache lists of
packages that contains extensions for other packages.
DO NOT USE THIS FOLDER
----------------------
* Do not read (or rely) the contents of this folder.
* Do write to this folder.
If you're interested in the lists of extensions stored in this folder use the
API offered by package `extension_discovery` to get this information.
If this package doesn't work for your use-case, then don't try to read the
contents of this folder. It may change, and will not remain stable.
Use package `extension_discovery`
---------------------------------
If you want to access information from this folder.
Feel free to delete this folder
-------------------------------
Files in this folder act as a cache, and the cache is discarded if the files
are older than the modification time of `.dart_tool/package_config.json`.
Hence, it should never be necessary to clear this cache manually, if you find a
need to do please file a bug.

View File

@@ -0,0 +1 @@
{"version":2,"entries":[{"package":"arbiter","rootUri":"../","packageUri":"lib/"}]}

View File

@@ -0,0 +1,172 @@
{
"configVersion": 2,
"packages": [
{
"name": "async",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/async-2.13.0",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "boolean_selector",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/boolean_selector-2.1.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "characters",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/characters-1.4.0",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "clock",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/clock-1.1.2",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "collection",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/collection-1.19.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "fake_async",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/fake_async-1.3.3",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "flutter",
"rootUri": "file:///Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable/packages/flutter",
"packageUri": "lib/",
"languageVersion": "3.8"
},
{
"name": "flutter_lints",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/flutter_lints-6.0.0",
"packageUri": "lib/",
"languageVersion": "3.8"
},
{
"name": "flutter_test",
"rootUri": "file:///Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable/packages/flutter_test",
"packageUri": "lib/",
"languageVersion": "3.8"
},
{
"name": "leak_tracker",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/leak_tracker-11.0.2",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "leak_tracker_flutter_testing",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/leak_tracker_flutter_testing-3.0.10",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "leak_tracker_testing",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/leak_tracker_testing-3.0.2",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "lints",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/lints-6.1.0",
"packageUri": "lib/",
"languageVersion": "3.8"
},
{
"name": "matcher",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/matcher-0.12.17",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "material_color_utilities",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/material_color_utilities-0.11.1",
"packageUri": "lib/",
"languageVersion": "2.17"
},
{
"name": "meta",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/meta-1.17.0",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "path",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/path-1.9.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "sky_engine",
"rootUri": "file:///Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable/bin/cache/pkg/sky_engine",
"packageUri": "lib/",
"languageVersion": "3.8"
},
{
"name": "source_span",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/source_span-1.10.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "stack_trace",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/stack_trace-1.12.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "stream_channel",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/stream_channel-2.1.4",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "string_scanner",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/string_scanner-1.4.1",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "term_glyph",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/term_glyph-1.2.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "test_api",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/test_api-0.7.7",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "vector_math",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/vector_math-2.2.0",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "vm_service",
"rootUri": "file:///Users/kaska/.pub-cache/hosted/pub.dev/vm_service-15.0.2",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "arbiter",
"rootUri": "../",
"packageUri": "lib/",
"languageVersion": "3.10"
}
],
"generator": "pub",
"generatorVersion": "3.10.8",
"flutterRoot": "file:///Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable",
"flutterVersion": "3.38.9",
"pubCache": "file:///Users/kaska/.pub-cache"
}

View File

@@ -0,0 +1,224 @@
{
"roots": [
"arbiter"
],
"packages": [
{
"name": "arbiter",
"version": "0.1.0",
"dependencies": [
"flutter"
],
"devDependencies": [
"flutter_lints",
"flutter_test"
]
},
{
"name": "flutter_lints",
"version": "6.0.0",
"dependencies": [
"lints"
]
},
{
"name": "flutter_test",
"version": "0.0.0",
"dependencies": [
"clock",
"collection",
"fake_async",
"flutter",
"leak_tracker_flutter_testing",
"matcher",
"meta",
"path",
"stack_trace",
"stream_channel",
"test_api",
"vector_math"
]
},
{
"name": "flutter",
"version": "0.0.0",
"dependencies": [
"characters",
"collection",
"material_color_utilities",
"meta",
"sky_engine",
"vector_math"
]
},
{
"name": "lints",
"version": "6.1.0",
"dependencies": []
},
{
"name": "stream_channel",
"version": "2.1.4",
"dependencies": [
"async"
]
},
{
"name": "meta",
"version": "1.17.0",
"dependencies": []
},
{
"name": "collection",
"version": "1.19.1",
"dependencies": []
},
{
"name": "leak_tracker_flutter_testing",
"version": "3.0.10",
"dependencies": [
"flutter",
"leak_tracker",
"leak_tracker_testing",
"matcher",
"meta"
]
},
{
"name": "vector_math",
"version": "2.2.0",
"dependencies": []
},
{
"name": "stack_trace",
"version": "1.12.1",
"dependencies": [
"path"
]
},
{
"name": "clock",
"version": "1.1.2",
"dependencies": []
},
{
"name": "fake_async",
"version": "1.3.3",
"dependencies": [
"clock",
"collection"
]
},
{
"name": "path",
"version": "1.9.1",
"dependencies": []
},
{
"name": "matcher",
"version": "0.12.17",
"dependencies": [
"async",
"meta",
"stack_trace",
"term_glyph",
"test_api"
]
},
{
"name": "test_api",
"version": "0.7.7",
"dependencies": [
"async",
"boolean_selector",
"collection",
"meta",
"source_span",
"stack_trace",
"stream_channel",
"string_scanner",
"term_glyph"
]
},
{
"name": "sky_engine",
"version": "0.0.0",
"dependencies": []
},
{
"name": "material_color_utilities",
"version": "0.11.1",
"dependencies": [
"collection"
]
},
{
"name": "characters",
"version": "1.4.0",
"dependencies": []
},
{
"name": "async",
"version": "2.13.0",
"dependencies": [
"collection",
"meta"
]
},
{
"name": "leak_tracker_testing",
"version": "3.0.2",
"dependencies": [
"leak_tracker",
"matcher",
"meta"
]
},
{
"name": "leak_tracker",
"version": "11.0.2",
"dependencies": [
"clock",
"collection",
"meta",
"path",
"vm_service"
]
},
{
"name": "term_glyph",
"version": "1.2.2",
"dependencies": []
},
{
"name": "string_scanner",
"version": "1.4.1",
"dependencies": [
"source_span"
]
},
{
"name": "source_span",
"version": "1.10.2",
"dependencies": [
"collection",
"path",
"term_glyph"
]
},
{
"name": "boolean_selector",
"version": "2.1.2",
"dependencies": [
"source_span",
"string_scanner"
]
},
{
"name": "vm_service",
"version": "15.0.2",
"dependencies": []
}
],
"configVersion": 1
}

View File

@@ -0,0 +1 @@
3.38.9

View File

@@ -0,0 +1,11 @@
// This is a generated file; do not edit or check into version control.
FLUTTER_ROOT=/Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable
FLUTTER_APPLICATION_PATH=/Users/kaska/Documents/Projects/Major/arbiter/useragent
COCOAPODS_PARALLEL_CODE_SIGN=true
FLUTTER_BUILD_DIR=build
FLUTTER_BUILD_NAME=0.1.0
FLUTTER_BUILD_NUMBER=0.1.0
DART_OBFUSCATION=false
TRACK_WIDGET_CREATION=true
TREE_SHAKE_ICONS=false
PACKAGE_CONFIG=.dart_tool/package_config.json

View File

@@ -0,0 +1,12 @@
#!/bin/sh
# This is a generated file; do not edit or check into version control.
export "FLUTTER_ROOT=/Users/kaska/.local/share/mise/installs/flutter/3.38.9-stable"
export "FLUTTER_APPLICATION_PATH=/Users/kaska/Documents/Projects/Major/arbiter/useragent"
export "COCOAPODS_PARALLEL_CODE_SIGN=true"
export "FLUTTER_BUILD_DIR=build"
export "FLUTTER_BUILD_NAME=0.1.0"
export "FLUTTER_BUILD_NUMBER=0.1.0"
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=true"
export "TREE_SHAKE_ICONS=false"
export "PACKAGE_CONFIG=.dart_tool/package_config.json"