Vault initial implementation #17
24
.woodpecker/server-lint.yaml
Normal file
24
.woodpecker/server-lint.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
when:
|
||||
- event: pull_request
|
||||
path:
|
||||
include: ['.woodpecker/server-*.yaml', 'server/**']
|
||||
- event: push
|
||||
branch: main
|
||||
path:
|
||||
include: ['.woodpecker/server-*.yaml', 'server/**']
|
||||
|
||||
steps:
|
||||
- name: test
|
||||
image: jdxcode/mise:latest
|
||||
directory: server
|
||||
environment:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_TARGET_DIR: /usr/local/cargo/target
|
||||
CARGO_HOME: /usr/local/cargo/registry
|
||||
volumes:
|
||||
- cargo-target:/usr/local/cargo/target
|
||||
- cargo-registry:/usr/local/cargo/registry
|
||||
commands:
|
||||
- apt-get update && apt-get install -y pkg-config
|
||||
- mise install rust
|
||||
- mise exec rust -- cargo clippy --all-targets --all-features -- -D warnings
|
||||
@@ -10,6 +10,10 @@ backend = "cargo:cargo-features"
|
||||
version = "0.11.1"
|
||||
backend = "cargo:cargo-features-manager"
|
||||
|
||||
[[tools."cargo:cargo-insta"]]
|
||||
version = "1.46.3"
|
||||
backend = "cargo:cargo-insta"
|
||||
|
||||
[[tools."cargo:cargo-nextest"]]
|
||||
version = "0.9.126"
|
||||
backend = "cargo:cargo-nextest"
|
||||
|
||||
@@ -9,3 +9,4 @@ rust = "1.93.0"
|
||||
"cargo:cargo-features-manager" = "0.11.1"
|
||||
"cargo:cargo-nextest" = "0.9.126"
|
||||
"cargo:cargo-shear" = "latest"
|
||||
"cargo:cargo-insta" = "1.46.3"
|
||||
|
||||
@@ -3,29 +3,8 @@ syntax = "proto3";
|
||||
package arbiter;
|
||||
|
||||
import "auth.proto";
|
||||
|
||||
message ClientRequest {
|
||||
oneof payload {
|
||||
arbiter.auth.ClientMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
|
||||
message ClientResponse {
|
||||
oneof payload {
|
||||
arbiter.auth.ServerMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
|
||||
message UserAgentRequest {
|
||||
oneof payload {
|
||||
arbiter.auth.ClientMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
message UserAgentResponse {
|
||||
oneof payload {
|
||||
arbiter.auth.ServerMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
import "client.proto";
|
||||
import "user_agent.proto";
|
||||
|
||||
message ServerInfo {
|
||||
string version = 1;
|
||||
|
||||
17
protobufs/client.proto
Normal file
17
protobufs/client.proto
Normal file
@@ -0,0 +1,17 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package arbiter;
|
||||
|
||||
import "auth.proto";
|
||||
|
||||
message ClientRequest {
|
||||
oneof payload {
|
||||
arbiter.auth.ClientMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
|
||||
message ClientResponse {
|
||||
oneof payload {
|
||||
arbiter.auth.ServerMessage auth_message = 1;
|
||||
}
|
||||
}
|
||||
@@ -2,13 +2,24 @@ syntax = "proto3";
|
||||
|
||||
package arbiter.unseal;
|
||||
|
||||
message UserAgentKeyRequest {}
|
||||
import "google/protobuf/empty.proto";
|
||||
|
||||
message ServerKeyResponse {
|
||||
bytes pubkey = 1;
|
||||
message UnsealStart {
|
||||
bytes client_pubkey = 1;
|
||||
}
|
||||
message UserAgentSealedKey {
|
||||
bytes sealed_key = 1;
|
||||
bytes pubkey = 2;
|
||||
bytes nonce = 3;
|
||||
|
||||
message UnsealStartResponse {
|
||||
bytes server_pubkey = 1;
|
||||
}
|
||||
message UnsealEncryptedKey {
|
||||
bytes nonce = 1;
|
||||
bytes ciphertext = 2;
|
||||
bytes associated_data = 3;
|
||||
}
|
||||
|
||||
enum UnsealResult {
|
||||
UNSEAL_RESULT_UNSPECIFIED = 0;
|
||||
UNSEAL_RESULT_SUCCESS = 1;
|
||||
UNSEAL_RESULT_INVALID_KEY = 2;
|
||||
UNSEAL_RESULT_UNBOOTSTRAPPED = 3;
|
||||
}
|
||||
|
||||
21
protobufs/user_agent.proto
Normal file
21
protobufs/user_agent.proto
Normal file
@@ -0,0 +1,21 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package arbiter;
|
||||
|
||||
import "auth.proto";
|
||||
import "unseal.proto";
|
||||
|
||||
message UserAgentRequest {
|
||||
oneof payload {
|
||||
arbiter.auth.ClientMessage auth_message = 1;
|
||||
arbiter.unseal.UnsealStart unseal_start = 2;
|
||||
arbiter.unseal.UnsealEncryptedKey unseal_encrypted_key = 3;
|
||||
}
|
||||
}
|
||||
message UserAgentResponse {
|
||||
oneof payload {
|
||||
arbiter.auth.ServerMessage auth_message = 1;
|
||||
arbiter.unseal.UnsealStartResponse unseal_start_response = 2;
|
||||
arbiter.unseal.UnsealResult unseal_result = 3;
|
||||
}
|
||||
}
|
||||
537
server/Cargo.lock
generated
537
server/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
BIN
server/crates/.DS_Store
vendored
Normal file
BIN
server/crates/.DS_Store
vendored
Normal file
Binary file not shown.
@@ -3,6 +3,9 @@ use tonic_prost_build::configure;
|
||||
static PROTOBUF_DIR: &str = "../../../protobufs";
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
|
||||
println!("cargo::rerun-if-changed={PROTOBUF_DIR}");
|
||||
|
||||
configure()
|
||||
.message_attribute(".", "#[derive(::kameo::Reply)]")
|
||||
.compile_protos(
|
||||
|
||||
@@ -6,6 +6,9 @@ pub mod proto {
|
||||
pub mod auth {
|
||||
tonic::include_proto!("arbiter.auth");
|
||||
}
|
||||
pub mod unseal {
|
||||
tonic::include_proto!("arbiter.unseal");
|
||||
}
|
||||
}
|
||||
|
||||
pub mod transport;
|
||||
|
||||
BIN
server/crates/arbiter-server/.DS_Store
vendored
Normal file
BIN
server/crates/arbiter-server/.DS_Store
vendored
Normal file
Binary file not shown.
@@ -5,13 +5,7 @@ edition = "2024"
|
||||
repository = "https://git.markettakers.org/MarketTakers/arbiter"
|
||||
|
||||
[dependencies]
|
||||
diesel = { version = "2.3.6", features = [
|
||||
"sqlite",
|
||||
"uuid",
|
||||
"time",
|
||||
"chrono",
|
||||
"serde_json",
|
||||
] }
|
||||
diesel = { version = "2.3.6", features = ["chrono", "returning_clauses_for_sqlite_3_35", "serde_json", "time", "uuid"] }
|
||||
diesel-async = { version = "0.7.4", features = [
|
||||
"bb8",
|
||||
"migrations",
|
||||
@@ -21,6 +15,7 @@ diesel-async = { version = "0.7.4", features = [
|
||||
ed25519-dalek.workspace = true
|
||||
arbiter-proto.path = "../arbiter-proto"
|
||||
tracing.workspace = true
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
tonic.workspace = true
|
||||
tokio.workspace = true
|
||||
rustls.workspace = true
|
||||
@@ -44,6 +39,12 @@ chrono.workspace = true
|
||||
memsafe = "0.4.0"
|
||||
zeroize = { version = "1.8.2", features = ["std", "simd"] }
|
||||
kameo.workspace = true
|
||||
x25519-dalek = { version = "2.0.1", features = ["getrandom"] }
|
||||
chacha20poly1305 = { version = "0.10.1", features = ["std"] }
|
||||
argon2 = { version = "0.5.3", features = ["zeroize"] }
|
||||
restructed = "0.2.2"
|
||||
strum = { version = "0.27.2", features = ["derive"] }
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "1.46.3"
|
||||
test-log = { version = "0.2", default-features = false, features = ["trace"] }
|
||||
|
||||
@@ -1,22 +1,40 @@
|
||||
create table if not exists aead_encrypted (
|
||||
create table if not exists root_key_history (
|
||||
id INTEGER not null PRIMARY KEY,
|
||||
current_nonce integer not null default(1), -- if re-encrypted, this should be incremented
|
||||
-- root key stored as aead encrypted artifact, with only difference that it's decrypted by unseal key (derived from user password)
|
||||
root_key_encryption_nonce blob not null default(1), -- if re-encrypted, this should be incremented. Used for encrypting root key
|
||||
data_encryption_nonce blob not null default(1), -- nonce used for encrypting with key itself
|
||||
ciphertext blob not null,
|
||||
tag blob not null,
|
||||
schema_version integer not null default(1) -- server would need to reencrypt, because this means that we have changed algorithm
|
||||
schema_version integer not null default(1), -- server would need to reencrypt, because this means that we have changed algorithm
|
||||
salt blob not null -- for key deriviation
|
||||
) STRICT;
|
||||
|
||||
create table if not exists aead_encrypted (
|
||||
id INTEGER not null PRIMARY KEY,
|
||||
current_nonce blob not null default(1), -- if re-encrypted, this should be incremented
|
||||
ciphertext blob not null,
|
||||
tag blob not null,
|
||||
schema_version integer not null default(1), -- server would need to reencrypt, because this means that we have changed algorithm
|
||||
associated_root_key_id integer not null references root_key_history (id) on delete RESTRICT,
|
||||
created_at integer not null default(unixepoch ('now'))
|
||||
) STRICT;
|
||||
|
||||
create unique index if not exists uniq_nonce_per_root_key on aead_encrypted (
|
||||
current_nonce,
|
||||
associated_root_key_id
|
||||
);
|
||||
|
||||
-- This is a singleton
|
||||
create table if not exists arbiter_settings (
|
||||
id INTEGER not null PRIMARY KEY CHECK (id = 1), -- singleton row, id must be 1
|
||||
root_key_id integer references aead_encrypted (id) on delete RESTRICT, -- if null, means wasn't bootstrapped yet
|
||||
root_key_id integer references root_key_history (id) on delete RESTRICT, -- if null, means wasn't bootstrapped yet
|
||||
cert_key blob not null,
|
||||
cert blob not null
|
||||
) STRICT;
|
||||
|
||||
create table if not exists useragent_client (
|
||||
id integer not null primary key,
|
||||
nonce integer not null default (1), -- used for auth challenge
|
||||
nonce integer not null default(1), -- used for auth challenge
|
||||
public_key blob not null,
|
||||
created_at integer not null default(unixepoch ('now')),
|
||||
updated_at integer not null default(unixepoch ('now'))
|
||||
@@ -24,7 +42,7 @@ create table if not exists useragent_client (
|
||||
|
||||
create table if not exists program_client (
|
||||
id integer not null primary key,
|
||||
nonce integer not null default (1), -- used for auth challenge
|
||||
nonce integer not null default(1), -- used for auth challenge
|
||||
public_key blob not null,
|
||||
created_at integer not null default(unixepoch ('now')),
|
||||
updated_at integer not null default(unixepoch ('now'))
|
||||
|
||||
BIN
server/crates/arbiter-server/src/.DS_Store
vendored
Normal file
BIN
server/crates/arbiter-server/src/.DS_Store
vendored
Normal file
Binary file not shown.
@@ -1,2 +1,4 @@
|
||||
pub mod user_agent;
|
||||
pub mod client;
|
||||
pub(crate) mod bootstrap;
|
||||
pub(crate) mod keyholder;
|
||||
@@ -1,19 +1,13 @@
|
||||
use arbiter_proto::{BOOTSTRAP_TOKEN_PATH, home_path};
|
||||
use diesel::{ExpressionMethods, QueryDsl};
|
||||
use diesel::QueryDsl;
|
||||
use diesel_async::RunQueryDsl;
|
||||
use kameo::{Actor, messages};
|
||||
use memsafe::MemSafe;
|
||||
use miette::Diagnostic;
|
||||
use rand::{RngExt, distr::StandardUniform, make_rng, rngs::StdRng};
|
||||
use secrecy::SecretString;
|
||||
use thiserror::Error;
|
||||
use tracing::info;
|
||||
use zeroize::{Zeroize, Zeroizing};
|
||||
|
||||
use crate::{
|
||||
context::{self, ServerContext},
|
||||
db::{self, DatabasePool, schema},
|
||||
};
|
||||
use crate::db::{self, DatabasePool, schema};
|
||||
|
||||
const TOKEN_LENGTH: usize = 64;
|
||||
|
||||
939
server/crates/arbiter-server/src/actors/keyholder.rs
Normal file
939
server/crates/arbiter-server/src/actors/keyholder.rs
Normal file
@@ -0,0 +1,939 @@
|
||||
use diesel::{
|
||||
ExpressionMethods as _, OptionalExtension, QueryDsl, SelectableHelper,
|
||||
dsl::{insert_into, update},
|
||||
};
|
||||
use diesel_async::{AsyncConnection, RunQueryDsl};
|
||||
use kameo::{Actor, Reply, messages};
|
||||
use memsafe::MemSafe;
|
||||
use strum::{EnumDiscriminants, IntoDiscriminant};
|
||||
use tracing::{error, info};
|
||||
|
||||
use crate::{
|
||||
actors::keyholder::v1::{KeyCell, Nonce},
|
||||
db::{
|
||||
self,
|
||||
models::{self, RootKeyHistory},
|
||||
schema::{self},
|
||||
},
|
||||
};
|
||||
|
||||
pub mod v1;
|
||||
|
||||
#[derive(Default, EnumDiscriminants)]
|
||||
#[strum_discriminants(derive(Reply), vis(pub))]
|
||||
enum State {
|
||||
#[default]
|
||||
Unbootstrapped,
|
||||
Sealed {
|
||||
root_key_history_id: i32,
|
||||
},
|
||||
Unsealed {
|
||||
root_key_history_id: i32,
|
||||
root_key: KeyCell,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error, miette::Diagnostic)]
|
||||
pub enum Error {
|
||||
#[error("Keyholder is already bootstrapped")]
|
||||
#[diagnostic(code(arbiter::keyholder::already_bootstrapped))]
|
||||
AlreadyBootstrapped,
|
||||
#[error("Keyholder is not bootstrapped")]
|
||||
#[diagnostic(code(arbiter::keyholder::not_bootstrapped))]
|
||||
NotBootstrapped,
|
||||
#[error("Invalid key provided")]
|
||||
#[diagnostic(code(arbiter::keyholder::invalid_key))]
|
||||
InvalidKey,
|
||||
|
||||
#[error("Requested aead entry not found")]
|
||||
#[diagnostic(code(arbiter::keyholder::aead_not_found))]
|
||||
NotFound,
|
||||
|
||||
#[error("Encryption error: {0}")]
|
||||
#[diagnostic(code(arbiter::keyholder::encryption_error))]
|
||||
Encryption(#[from] chacha20poly1305::aead::Error),
|
||||
|
||||
#[error("Database error: {0}")]
|
||||
#[diagnostic(code(arbiter::keyholder::database_error))]
|
||||
DatabaseConnection(#[from] db::PoolError),
|
||||
|
||||
#[error("Database transaction error: {0}")]
|
||||
#[diagnostic(code(arbiter::keyholder::database_transaction_error))]
|
||||
DatabaseTransaction(#[from] diesel::result::Error),
|
||||
|
||||
#[error("Broken database")]
|
||||
#[diagnostic(code(arbiter::keyholder::broken_database))]
|
||||
BrokenDatabase,
|
||||
}
|
||||
|
||||
/// Manages vault root key and tracks current state of the vault (bootstrapped/unbootstrapped, sealed/unsealed).
|
||||
/// Provides API for encrypting and decrypting data using the vault root key.
|
||||
/// Abstraction over database to make sure nonces are never reused and encryption keys are never exposed in plaintext outside of this actor.
|
||||
#[derive(Actor)]
|
||||
pub struct KeyHolder {
|
||||
db: db::DatabasePool,
|
||||
state: State,
|
||||
}
|
||||
|
||||
#[messages]
|
||||
impl KeyHolder {
|
||||
pub async fn new(db: db::DatabasePool) -> Result<Self, Error> {
|
||||
let state = {
|
||||
let mut conn = db.get().await?;
|
||||
|
||||
let (root_key_history,) = schema::arbiter_settings::table
|
||||
.left_join(schema::root_key_history::table)
|
||||
.select((Option::<RootKeyHistory>::as_select(),))
|
||||
.get_result::<(Option<RootKeyHistory>,)>(&mut conn)
|
||||
.await?;
|
||||
|
||||
match root_key_history {
|
||||
Some(root_key_history) => State::Sealed {
|
||||
root_key_history_id: root_key_history.id,
|
||||
},
|
||||
None => State::Unbootstrapped,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Self { db, state })
|
||||
}
|
||||
|
||||
// Exclusive transaction to avoid race condtions if multiple keyholders write
|
||||
// additional layer of protection against nonce-reuse
|
||||
async fn get_new_nonce(pool: &db::DatabasePool, root_key_id: i32) -> Result<Nonce, Error> {
|
||||
let mut conn = pool.get().await?;
|
||||
|
||||
let nonce = conn
|
||||
.exclusive_transaction(|conn| {
|
||||
Box::pin(async move {
|
||||
let current_nonce: Vec<u8> = schema::root_key_history::table
|
||||
.filter(schema::root_key_history::id.eq(root_key_id))
|
||||
.select(schema::root_key_history::data_encryption_nonce)
|
||||
.first(conn)
|
||||
.await?;
|
||||
|
||||
let mut nonce =
|
||||
v1::Nonce::try_from(current_nonce.as_slice()).map_err(|_| {
|
||||
error!(
|
||||
"Broken database: invalid nonce for root key history id={}",
|
||||
root_key_id
|
||||
);
|
||||
Error::BrokenDatabase
|
||||
})?;
|
||||
nonce.increment();
|
||||
|
||||
update(schema::root_key_history::table)
|
||||
.filter(schema::root_key_history::id.eq(root_key_id))
|
||||
.set(schema::root_key_history::data_encryption_nonce.eq(nonce.to_vec()))
|
||||
.execute(conn)
|
||||
.await?;
|
||||
|
||||
Result::<_, Error>::Ok(nonce)
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(nonce)
|
||||
}
|
||||
|
||||
#[message]
|
||||
pub async fn bootstrap(&mut self, seal_key_raw: MemSafe<Vec<u8>>) -> Result<(), Error> {
|
||||
if !matches!(self.state, State::Unbootstrapped) {
|
||||
return Err(Error::AlreadyBootstrapped);
|
||||
}
|
||||
let salt = v1::generate_salt();
|
||||
let mut seal_key = v1::derive_seal_key(seal_key_raw, &salt);
|
||||
let mut root_key = KeyCell::new_secure_random();
|
||||
|
||||
// Zero nonces are fine because they are one-time
|
||||
let root_key_nonce = v1::Nonce::default();
|
||||
let data_encryption_nonce = v1::Nonce::default();
|
||||
|
||||
let root_key_ciphertext: Vec<u8> = {
|
||||
let root_key_reader = root_key.0.read().unwrap();
|
||||
let root_key_reader = root_key_reader.as_slice();
|
||||
seal_key
|
||||
.encrypt(&root_key_nonce, v1::ROOT_KEY_TAG, root_key_reader)
|
||||
.map_err(|err| {
|
||||
error!(?err, "Fatal bootstrap error");
|
||||
Error::Encryption(err)
|
||||
})?
|
||||
};
|
||||
|
||||
let mut conn = self.db.get().await?;
|
||||
|
||||
let data_encryption_nonce_bytes = data_encryption_nonce.to_vec();
|
||||
let root_key_history_id = conn
|
||||
.transaction(|conn| {
|
||||
Box::pin(async move {
|
||||
let root_key_history_id: i32 = insert_into(schema::root_key_history::table)
|
||||
.values(&models::NewRootKeyHistory {
|
||||
ciphertext: root_key_ciphertext,
|
||||
tag: v1::ROOT_KEY_TAG.to_vec(),
|
||||
root_key_encryption_nonce: root_key_nonce.to_vec(),
|
||||
data_encryption_nonce: data_encryption_nonce_bytes,
|
||||
schema_version: 1,
|
||||
salt: salt.to_vec(),
|
||||
})
|
||||
.returning(schema::root_key_history::id)
|
||||
.get_result(conn)
|
||||
.await?;
|
||||
|
||||
update(schema::arbiter_settings::table)
|
||||
.set(schema::arbiter_settings::root_key_id.eq(root_key_history_id))
|
||||
.execute(conn)
|
||||
.await?;
|
||||
|
||||
Result::<_, diesel::result::Error>::Ok(root_key_history_id)
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
|
||||
self.state = State::Unsealed {
|
||||
root_key,
|
||||
root_key_history_id,
|
||||
};
|
||||
|
||||
info!("Keyholder bootstrapped successfully");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[message]
|
||||
pub async fn try_unseal(&mut self, seal_key_raw: MemSafe<Vec<u8>>) -> Result<(), Error> {
|
||||
let State::Sealed {
|
||||
root_key_history_id,
|
||||
} = &self.state
|
||||
else {
|
||||
return Err(Error::NotBootstrapped);
|
||||
};
|
||||
|
||||
// We don't want to hold connection while doing expensive KDF work
|
||||
let current_key = {
|
||||
let mut conn = self.db.get().await?;
|
||||
schema::root_key_history::table
|
||||
.filter(schema::root_key_history::id.eq(*root_key_history_id))
|
||||
.select(schema::root_key_history::data_encryption_nonce )
|
||||
.select(RootKeyHistory::as_select() )
|
||||
.first(&mut conn)
|
||||
.await?
|
||||
};
|
||||
|
||||
let salt = ¤t_key.salt;
|
||||
let salt = v1::Salt::try_from(salt.as_slice()).map_err(|_| {
|
||||
error!("Broken database: invalid salt for root key");
|
||||
Error::BrokenDatabase
|
||||
})?;
|
||||
let mut seal_key = v1::derive_seal_key(seal_key_raw, &salt);
|
||||
|
||||
let mut root_key = MemSafe::new(current_key.ciphertext.clone()).unwrap();
|
||||
|
||||
let nonce = v1::Nonce::try_from(current_key.root_key_encryption_nonce.as_slice()).map_err(
|
||||
|_| {
|
||||
error!("Broken database: invalid nonce for root key");
|
||||
Error::BrokenDatabase
|
||||
},
|
||||
)?;
|
||||
|
||||
seal_key
|
||||
.decrypt_in_place(&nonce, v1::ROOT_KEY_TAG, &mut root_key)
|
||||
.map_err(|err| {
|
||||
error!(?err, "Failed to unseal root key: invalid seal key");
|
||||
Error::InvalidKey
|
||||
})?;
|
||||
|
||||
self.state = State::Unsealed {
|
||||
root_key_history_id: current_key.id,
|
||||
root_key: v1::KeyCell::try_from(root_key).map_err(|err| {
|
||||
error!(?err, "Broken database: invalid encryption key size");
|
||||
Error::BrokenDatabase
|
||||
})?,
|
||||
};
|
||||
|
||||
info!("Keyholder unsealed successfully");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Decrypts the `aead_encrypted` entry with the given ID and returns the plaintext
|
||||
#[message]
|
||||
pub async fn decrypt(&mut self, aead_id: i32) -> Result<MemSafe<Vec<u8>>, Error> {
|
||||
let State::Unsealed { root_key, .. } = &mut self.state else {
|
||||
return Err(Error::NotBootstrapped);
|
||||
};
|
||||
|
||||
let row: models::AeadEncrypted = {
|
||||
let mut conn = self.db.get().await?;
|
||||
schema::aead_encrypted::table
|
||||
.select(models::AeadEncrypted::as_select())
|
||||
.filter(schema::aead_encrypted::id.eq(aead_id))
|
||||
.first(&mut conn)
|
||||
.await
|
||||
.optional()?
|
||||
.ok_or(Error::NotFound)?
|
||||
};
|
||||
|
||||
let nonce = v1::Nonce::try_from(row.current_nonce.as_slice()).map_err(|_| {
|
||||
error!(
|
||||
"Broken database: invalid nonce for aead_encrypted id={}",
|
||||
aead_id
|
||||
);
|
||||
Error::BrokenDatabase
|
||||
})?;
|
||||
let mut output = MemSafe::new(row.ciphertext).unwrap();
|
||||
root_key.decrypt_in_place(&nonce, v1::TAG, &mut output)?;
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
// Creates new `aead_encrypted` entry in the database and returns it's ID
|
||||
#[message]
|
||||
pub async fn create_new(&mut self, mut plaintext: MemSafe<Vec<u8>>) -> Result<i32, Error> {
|
||||
let State::Unsealed {
|
||||
root_key,
|
||||
root_key_history_id,
|
||||
} = &mut self.state
|
||||
else {
|
||||
return Err(Error::NotBootstrapped);
|
||||
};
|
||||
|
||||
// Order matters here - `get_new_nonce` acquires connection, so we need to call it before next acquire
|
||||
// Borrow checker note: &mut borrow a few lines above is disjoint from this field
|
||||
let nonce = Self::get_new_nonce(&self.db, *root_key_history_id).await?;
|
||||
|
||||
let mut ciphertext_buffer = plaintext.write().unwrap();
|
||||
let ciphertext_buffer: &mut Vec<u8> = ciphertext_buffer.as_mut();
|
||||
root_key.encrypt_in_place(&nonce, v1::TAG, &mut *ciphertext_buffer)?;
|
||||
|
||||
let ciphertext = std::mem::take(ciphertext_buffer);
|
||||
|
||||
let mut conn = self.db.get().await?;
|
||||
let aead_id: i32 = insert_into(schema::aead_encrypted::table)
|
||||
.values(&models::NewAeadEncrypted {
|
||||
ciphertext,
|
||||
tag: v1::TAG.to_vec(),
|
||||
current_nonce: nonce.to_vec(),
|
||||
schema_version: 1,
|
||||
associated_root_key_id: *root_key_history_id,
|
||||
created_at: chrono::Utc::now().timestamp() as i32,
|
||||
})
|
||||
.returning(schema::aead_encrypted::id)
|
||||
.get_result(&mut conn)
|
||||
.await?;
|
||||
|
||||
Ok(aead_id)
|
||||
}
|
||||
|
||||
#[message]
|
||||
pub fn get_state(&self) -> StateDiscriminants {
|
||||
self.state.discriminant()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::sync::Arc;
|
||||
|
||||
use diesel::dsl::{insert_into, sql_query, update};
|
||||
use diesel_async::RunQueryDsl;
|
||||
use futures::stream::TryUnfold;
|
||||
use kameo::actor::{ActorRef, Spawn as _};
|
||||
use memsafe::MemSafe;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::task::JoinSet;
|
||||
|
||||
use crate::db::{self, models::ArbiterSetting};
|
||||
|
||||
use super::*;
|
||||
|
||||
async fn seed_settings(pool: &db::DatabasePool) {
|
||||
let mut conn = pool.get().await.unwrap();
|
||||
insert_into(schema::arbiter_settings::table)
|
||||
.values(&ArbiterSetting {
|
||||
id: 1,
|
||||
root_key_id: None,
|
||||
cert_key: vec![],
|
||||
cert: vec![],
|
||||
})
|
||||
.execute(&mut conn)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
async fn bootstrapped_actor(db: &db::DatabasePool) -> KeyHolder {
|
||||
seed_settings(db).await;
|
||||
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
|
||||
let seal_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
|
||||
actor.bootstrap(seal_key).await.unwrap();
|
||||
actor
|
||||
}
|
||||
|
||||
async fn write_concurrently(
|
||||
actor: ActorRef<KeyHolder>,
|
||||
prefix: &'static str,
|
||||
count: usize,
|
||||
) -> Vec<(i32, Vec<u8>)> {
|
||||
let mut set = JoinSet::new();
|
||||
for i in 0..count {
|
||||
let actor = actor.clone();
|
||||
set.spawn(async move {
|
||||
let plaintext = format!("{prefix}-{i}").into_bytes();
|
||||
let id = {
|
||||
actor
|
||||
.ask(CreateNew {
|
||||
plaintext: MemSafe::new(plaintext.clone()).unwrap(),
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
};
|
||||
(id, plaintext)
|
||||
});
|
||||
}
|
||||
|
||||
let mut out = Vec::with_capacity(count);
|
||||
while let Some(res) = set.join_next().await {
|
||||
out.push(res.unwrap());
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_bootstrap() {
|
||||
let db = db::create_test_pool().await;
|
||||
seed_settings(&db).await;
|
||||
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
|
||||
|
||||
assert!(matches!(actor.state, State::Unbootstrapped));
|
||||
|
||||
let seal_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
|
||||
actor.bootstrap(seal_key).await.unwrap();
|
||||
|
||||
assert!(matches!(actor.state, State::Unsealed { .. }));
|
||||
|
||||
let mut conn = db.get().await.unwrap();
|
||||
let row: models::RootKeyHistory = schema::root_key_history::table
|
||||
.select(models::RootKeyHistory::as_select())
|
||||
.first(&mut conn)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(row.schema_version, 1);
|
||||
assert_eq!(row.tag, v1::ROOT_KEY_TAG);
|
||||
assert!(!row.ciphertext.is_empty());
|
||||
assert!(!row.salt.is_empty());
|
||||
assert_eq!(row.data_encryption_nonce, v1::Nonce::default().to_vec());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_bootstrap_rejects_double() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let seal_key2 = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
|
||||
let err = actor.bootstrap(seal_key2).await.unwrap_err();
|
||||
assert!(matches!(err, Error::AlreadyBootstrapped));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_create_decrypt_roundtrip() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let plaintext = b"hello arbiter";
|
||||
let aead_id = actor
|
||||
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
|
||||
let decrypted = decrypted.read().unwrap();
|
||||
assert_eq!(*decrypted, plaintext);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_create_new_before_bootstrap_fails() {
|
||||
let db = db::create_test_pool().await;
|
||||
seed_settings(&db).await;
|
||||
let mut actor = KeyHolder::new(db).await.unwrap();
|
||||
|
||||
let err = actor
|
||||
.create_new(MemSafe::new(b"data".to_vec()).unwrap())
|
||||
.await
|
||||
.unwrap_err();
|
||||
assert!(matches!(err, Error::NotBootstrapped));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_decrypt_before_bootstrap_fails() {
|
||||
let db = db::create_test_pool().await;
|
||||
seed_settings(&db).await;
|
||||
let mut actor = KeyHolder::new(db).await.unwrap();
|
||||
|
||||
let err = actor.decrypt(1).await.unwrap_err();
|
||||
assert!(matches!(err, Error::NotBootstrapped));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_decrypt_nonexistent_returns_not_found() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let err = actor.decrypt(9999).await.unwrap_err();
|
||||
assert!(matches!(err, Error::NotFound));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_new_restores_sealed_state() {
|
||||
let db = db::create_test_pool().await;
|
||||
let actor = bootstrapped_actor(&db).await;
|
||||
drop(actor);
|
||||
|
||||
let actor2 = KeyHolder::new(db).await.unwrap();
|
||||
assert!(matches!(actor2.state, State::Sealed { .. }));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_nonce_never_reused() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let n = 5;
|
||||
let mut ids = Vec::with_capacity(n);
|
||||
for i in 0..n {
|
||||
let id = actor
|
||||
.create_new(MemSafe::new(format!("secret {i}").into_bytes()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
ids.push(id);
|
||||
}
|
||||
|
||||
// read all stored nonces from DB
|
||||
let mut conn = db.get().await.unwrap();
|
||||
let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
|
||||
.select(models::AeadEncrypted::as_select())
|
||||
.load(&mut conn)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(rows.len(), n);
|
||||
|
||||
let nonces: Vec<&Vec<u8>> = rows.iter().map(|r| &r.current_nonce).collect();
|
||||
let unique: HashSet<&Vec<u8>> = nonces.iter().copied().collect();
|
||||
assert_eq!(nonces.len(), unique.len(), "all nonces must be unique");
|
||||
|
||||
// verify nonces are sequential increments from 1
|
||||
for (i, row) in rows.iter().enumerate() {
|
||||
let mut expected = v1::Nonce::default();
|
||||
for _ in 0..=i {
|
||||
expected.increment();
|
||||
}
|
||||
assert_eq!(row.current_nonce, expected.to_vec(), "nonce {i} mismatch");
|
||||
}
|
||||
|
||||
// verify data_encryption_nonce on root_key_history tracks the latest nonce
|
||||
let root_row: models::RootKeyHistory = schema::root_key_history::table
|
||||
.select(models::RootKeyHistory::as_select())
|
||||
.first(&mut conn)
|
||||
.await
|
||||
.unwrap();
|
||||
let last_nonce = &rows.last().unwrap().current_nonce;
|
||||
assert_eq!(
|
||||
&root_row.data_encryption_nonce, last_nonce,
|
||||
"root_key_history must track the latest nonce"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_unseal_correct_password() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let plaintext = b"survive a restart";
|
||||
let aead_id = actor
|
||||
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
drop(actor);
|
||||
|
||||
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
|
||||
assert!(matches!(actor.state, State::Sealed { .. }));
|
||||
|
||||
let seal_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
|
||||
actor.try_unseal(seal_key).await.unwrap();
|
||||
assert!(matches!(actor.state, State::Unsealed { .. }));
|
||||
|
||||
// previously encrypted data is still decryptable
|
||||
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
|
||||
assert_eq!(*decrypted.read().unwrap(), plaintext);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn test_unseal_wrong_then_correct_password() {
|
||||
let db = db::create_test_pool().await;
|
||||
let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
let plaintext = b"important data";
|
||||
let aead_id = actor
|
||||
.create_new(MemSafe::new(plaintext.to_vec()).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
drop(actor);
|
||||
|
||||
let mut actor = KeyHolder::new(db.clone()).await.unwrap();
|
||||
assert!(matches!(actor.state, State::Sealed { .. }));
|
||||
|
||||
// wrong password
|
||||
let bad_key = MemSafe::new(b"wrong-password".to_vec()).unwrap();
|
||||
let err = actor.try_unseal(bad_key).await.unwrap_err();
|
||||
assert!(matches!(err, Error::InvalidKey));
|
||||
assert!(
|
||||
matches!(actor.state, State::Sealed { .. }),
|
||||
"state must remain Sealed after failed attempt"
|
||||
);
|
||||
|
||||
// correct password
|
||||
let good_key = MemSafe::new(b"test-seal-key".to_vec()).unwrap();
|
||||
actor.try_unseal(good_key).await.unwrap();
|
||||
assert!(matches!(actor.state, State::Unsealed { .. }));
|
||||
|
||||
let mut decrypted = actor.decrypt(aead_id).await.unwrap();
|
||||
assert_eq!(*decrypted.read().unwrap(), plaintext);
|
||||
}
|
||||
|
||||
/// Encrypting identical plaintexts must produce distinct ciphertexts (each
/// entry is sealed under its own nonce), while both entries still decrypt
/// back to the original bytes.
#[tokio::test]
#[test_log::test]
async fn test_ciphertext_differs_across_entries() {
    let db = db::create_test_pool().await;
    let mut actor = bootstrapped_actor(&db).await;

    // Two entries with byte-identical plaintext.
    let plaintext = b"same content";
    let id1 = actor
        .create_new(MemSafe::new(plaintext.to_vec()).unwrap())
        .await
        .unwrap();
    let id2 = actor
        .create_new(MemSafe::new(plaintext.to_vec()).unwrap())
        .await
        .unwrap();

    // different nonces => different ciphertext, even for identical plaintext
    let mut conn = db.get().await.unwrap();
    let row1: models::AeadEncrypted = schema::aead_encrypted::table
        .filter(schema::aead_encrypted::id.eq(id1))
        .select(models::AeadEncrypted::as_select())
        .first(&mut conn)
        .await
        .unwrap();
    let row2: models::AeadEncrypted = schema::aead_encrypted::table
        .filter(schema::aead_encrypted::id.eq(id2))
        .select(models::AeadEncrypted::as_select())
        .first(&mut conn)
        .await
        .unwrap();

    assert_ne!(row1.ciphertext, row2.ciphertext);

    // but both decrypt to the same plaintext
    let mut d1 = actor.decrypt(id1).await.unwrap();
    let mut d2 = actor.decrypt(id2).await.unwrap();
    assert_eq!(*d1.read().unwrap(), plaintext);
    assert_eq!(*d2.read().unwrap(), plaintext);
}
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
async fn concurrent_create_new_no_duplicate_nonces_() {
|
||||
let db = db::create_test_pool().await;
|
||||
let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);
|
||||
|
||||
let writes = write_concurrently(actor, "nonce-unique", 32).await;
|
||||
assert_eq!(writes.len(), 32);
|
||||
|
||||
let mut conn = db.get().await.unwrap();
|
||||
let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
|
||||
.select(models::AeadEncrypted::as_select())
|
||||
.load(&mut conn)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(rows.len(), 32);
|
||||
|
||||
let nonces: Vec<&Vec<u8>> = rows.iter().map(|r| &r.current_nonce).collect();
|
||||
let unique: HashSet<&Vec<u8>> = nonces.iter().copied().collect();
|
||||
assert_eq!(nonces.len(), unique.len(), "all nonces must be unique");
|
||||
}
|
||||
|
||||
/// After concurrent writes, `root_key_history.data_encryption_nonce` must
/// equal the maximum per-row nonce handed out — i.e. the persisted root
/// counter never moves backward under concurrency.
#[tokio::test]
#[test_log::test]
async fn concurrent_create_new_root_nonce_never_moves_backward() {
    let db = db::create_test_pool().await;
    let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);

    write_concurrently(actor, "root-max", 24).await;

    let mut conn = db.get().await.unwrap();
    let rows: Vec<models::AeadEncrypted> = schema::aead_encrypted::table
        .select(models::AeadEncrypted::as_select())
        .load(&mut conn)
        .await
        .unwrap();
    // Byte-wise max works because nonces are fixed-length counters.
    let max_nonce = rows
        .iter()
        .map(|r| r.current_nonce.clone())
        .max()
        .expect("at least one row");

    let root_row: models::RootKeyHistory = schema::root_key_history::table
        .select(models::RootKeyHistory::as_select())
        .first(&mut conn)
        .await
        .unwrap();
    assert_eq!(root_row.data_encryption_nonce, max_nonce);
}
|
||||
|
||||
/// Nonce allocation stays monotonic even when `get_new_nonce` is called
/// directly between writes: each allocation advances the persisted counter,
/// and the next `create_new` uses a strictly larger nonce.
#[tokio::test]
#[test_log::test]
async fn nonce_monotonic_even_when_nonce_allocation_interleaves() {
    let db = db::create_test_pool().await;
    let mut actor = bootstrapped_actor(&db).await;
    let root_key_history_id = match actor.state {
        State::Unsealed {
            root_key_history_id,
            ..
        } => root_key_history_id,
        _ => panic!("expected unsealed state"),
    };

    // Allocate two nonces directly, without writing any rows.
    let n1 = KeyHolder::get_new_nonce(&db, root_key_history_id)
        .await
        .unwrap();
    let n2 = KeyHolder::get_new_nonce(&db, root_key_history_id)
        .await
        .unwrap();
    assert!(n2.to_vec() > n1.to_vec(), "nonce must increase");

    // Persisted counter reflects the latest allocation.
    let mut conn = db.get().await.unwrap();
    let root_row: models::RootKeyHistory = schema::root_key_history::table
        .select(models::RootKeyHistory::as_select())
        .first(&mut conn)
        .await
        .unwrap();
    assert_eq!(root_row.data_encryption_nonce, n2.to_vec());

    // A subsequent write must keep advancing past the interleaved allocations.
    let id = actor
        .create_new(MemSafe::new(b"post-interleave".to_vec()).unwrap())
        .await
        .unwrap();
    let row: models::AeadEncrypted = schema::aead_encrypted::table
        .filter(schema::aead_encrypted::id.eq(id))
        .select(models::AeadEncrypted::as_select())
        .first(&mut conn)
        .await
        .unwrap();
    assert!(
        row.current_nonce > n2.to_vec(),
        "next write must advance nonce"
    );
}
|
||||
|
||||
/// A failed AEAD insert (forced via a SQLite `RAISE(ABORT)` trigger) must not
/// leave a partial row behind. Note: the final assertion deliberately pins
/// the *current* behavior that the root nonce still advances, leaving a gap
/// — unused nonces are safe, reused ones are not.
#[tokio::test]
#[test_log::test]
async fn insert_failure_does_not_create_partial_row() {
    let db = db::create_test_pool().await;
    let mut actor = bootstrapped_actor(&db).await;
    let root_key_history_id = match actor.state {
        State::Unsealed {
            root_key_history_id,
            ..
        } => root_key_history_id,
        _ => panic!("expected unsealed state"),
    };

    // Snapshot row count and root nonce before the forced failure.
    let mut conn = db.get().await.unwrap();
    let before_count: i64 = schema::aead_encrypted::table
        .count()
        .get_result(&mut conn)
        .await
        .unwrap();
    let before_root_nonce: Vec<u8> = schema::root_key_history::table
        .filter(schema::root_key_history::id.eq(root_key_history_id))
        .select(schema::root_key_history::data_encryption_nonce)
        .first(&mut conn)
        .await
        .unwrap();

    // Force every insert into aead_encrypted to abort.
    sql_query(
        "CREATE TRIGGER fail_aead_insert BEFORE INSERT ON aead_encrypted BEGIN SELECT RAISE(ABORT, 'forced test failure'); END;",
    )
    .execute(&mut conn)
    .await
    .unwrap();
    drop(conn);

    let err = actor
        .create_new(MemSafe::new(b"should fail".to_vec()).unwrap())
        .await
        .unwrap_err();
    assert!(matches!(err, Error::DatabaseTransaction(_)));

    // Remove the trigger so later queries work normally.
    let mut conn = db.get().await.unwrap();
    sql_query("DROP TRIGGER fail_aead_insert;")
        .execute(&mut conn)
        .await
        .unwrap();

    let after_count: i64 = schema::aead_encrypted::table
        .count()
        .get_result(&mut conn)
        .await
        .unwrap();
    assert_eq!(
        before_count, after_count,
        "failed insert must not create row"
    );

    let after_root_nonce: Vec<u8> = schema::root_key_history::table
        .filter(schema::root_key_history::id.eq(root_key_history_id))
        .select(schema::root_key_history::data_encryption_nonce)
        .first(&mut conn)
        .await
        .unwrap();
    assert!(
        after_root_nonce > before_root_nonce,
        "current behavior allows nonce gap on failed insert"
    );
}
|
||||
|
||||
/// Everything written under high concurrency must round-trip: a fresh
/// `KeyHolder` unsealed with the same password decrypts every entry back to
/// the plaintext that was written.
#[tokio::test]
#[test_log::test]
async fn decrypt_roundtrip_after_high_concurrency() {
    let db = db::create_test_pool().await;
    let actor = KeyHolder::spawn(bootstrapped_actor(&db).await);

    let writes = write_concurrently(actor, "roundtrip", 40).await;
    let expected: HashMap<i32, Vec<u8>> = writes.into_iter().collect();

    // Decrypt through a second, independently unsealed KeyHolder.
    let mut decryptor = KeyHolder::new(db.clone()).await.unwrap();
    decryptor
        .try_unseal(MemSafe::new(b"test-seal-key".to_vec()).unwrap())
        .await
        .unwrap();

    for (id, plaintext) in expected {
        let mut decrypted = decryptor.decrypt(id).await.unwrap();
        assert_eq!(*decrypted.read().unwrap(), plaintext);
    }
}
|
||||
|
||||
// #[tokio::test]
|
||||
// #[test_log::test]
|
||||
// async fn swapping_ciphertext_and_nonce_between_rows_changes_logical_binding() {
|
||||
// let db = db::create_test_pool().await;
|
||||
// let mut actor = bootstrapped_actor(&db).await;
|
||||
|
||||
// let plaintext1 = b"entry-one";
|
||||
// let plaintext2 = b"entry-two";
|
||||
// let id1 = actor
|
||||
// .create_new(MemSafe::new(plaintext1.to_vec()).unwrap())
|
||||
// .await
|
||||
// .unwrap();
|
||||
// let id2 = actor
|
||||
// .create_new(MemSafe::new(plaintext2.to_vec()).unwrap())
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// let mut conn = db.get().await.unwrap();
|
||||
// let row1: models::AeadEncrypted = schema::aead_encrypted::table
|
||||
// .filter(schema::aead_encrypted::id.eq(id1))
|
||||
// .select(models::AeadEncrypted::as_select())
|
||||
// .first(&mut conn)
|
||||
// .await
|
||||
// .unwrap();
|
||||
// let row2: models::AeadEncrypted = schema::aead_encrypted::table
|
||||
// .filter(schema::aead_encrypted::id.eq(id2))
|
||||
// .select(models::AeadEncrypted::as_select())
|
||||
// .first(&mut conn)
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id1)))
|
||||
// .set((
|
||||
// schema::aead_encrypted::ciphertext.eq(row2.ciphertext.clone()),
|
||||
// schema::aead_encrypted::current_nonce.eq(row2.current_nonce.clone()),
|
||||
// ))
|
||||
// .execute(&mut conn)
|
||||
// .await
|
||||
// .unwrap();
|
||||
// update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id2)))
|
||||
// .set((
|
||||
// schema::aead_encrypted::ciphertext.eq(row1.ciphertext.clone()),
|
||||
// schema::aead_encrypted::current_nonce.eq(row1.current_nonce.clone()),
|
||||
// ))
|
||||
// .execute(&mut conn)
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// let mut d1 = actor.decrypt(id1).await.unwrap();
|
||||
// let mut d2 = actor.decrypt(id2).await.unwrap();
|
||||
// assert_eq!(*d1.read().unwrap(), plaintext2);
|
||||
// assert_eq!(*d2.read().unwrap(), plaintext1);
|
||||
// }
|
||||
/// Malformed nonce bytes in the database must fail closed with
/// `Error::BrokenDatabase`: a broken root counter fails `create_new`, and a
/// broken per-row nonce fails `decrypt`.
#[tokio::test]
#[test_log::test]
async fn broken_db_nonce_format_fails_closed() {
    // malformed root_key_history nonce must fail create_new
    let db = db::create_test_pool().await;
    let mut actor = bootstrapped_actor(&db).await;
    let root_key_history_id = match actor.state {
        State::Unsealed {
            root_key_history_id,
            ..
        } => root_key_history_id,
        _ => panic!("expected unsealed state"),
    };

    // Corrupt the persisted counter (3 bytes instead of a full nonce).
    let mut conn = db.get().await.unwrap();
    update(
        schema::root_key_history::table
            .filter(schema::root_key_history::id.eq(root_key_history_id)),
    )
    .set(schema::root_key_history::data_encryption_nonce.eq(vec![1, 2, 3]))
    .execute(&mut conn)
    .await
    .unwrap();
    drop(conn);

    let err = actor
        .create_new(MemSafe::new(b"must fail".to_vec()).unwrap())
        .await
        .unwrap_err();
    assert!(matches!(err, Error::BrokenDatabase));

    // malformed per-row nonce must fail decrypt
    let db = db::create_test_pool().await;
    let mut actor = bootstrapped_actor(&db).await;
    let id = actor
        .create_new(MemSafe::new(b"decrypt target".to_vec()).unwrap())
        .await
        .unwrap();
    let mut conn = db.get().await.unwrap();
    update(schema::aead_encrypted::table.filter(schema::aead_encrypted::id.eq(id)))
        .set(schema::aead_encrypted::current_nonce.eq(vec![7, 8]))
        .execute(&mut conn)
        .await
        .unwrap();
    drop(conn);

    let err = actor.decrypt(id).await.unwrap_err();
    assert!(matches!(err, Error::BrokenDatabase));
}
|
||||
}
|
||||
242
server/crates/arbiter-server/src/actors/keyholder/v1.rs
Normal file
242
server/crates/arbiter-server/src/actors/keyholder/v1.rs
Normal file
@@ -0,0 +1,242 @@
|
||||
use std::ops::Deref as _;
|
||||
|
||||
use argon2::{Algorithm, Argon2, password_hash::Salt as ArgonSalt};
|
||||
use chacha20poly1305::{
|
||||
AeadInPlace, Key, KeyInit as _, XChaCha20Poly1305, XNonce,
|
||||
aead::{AeadMut, Error, Payload},
|
||||
};
|
||||
use memsafe::MemSafe;
|
||||
use rand::{
|
||||
Rng as _, SeedableRng,
|
||||
rngs::{StdRng, SysRng},
|
||||
};
|
||||
|
||||
/// Domain-separation tag mixed into the AEAD associated data for the sealed
/// root key.
pub const ROOT_KEY_TAG: &[u8] = "arbiter/seal/v1".as_bytes();
/// Domain-separation tag for encrypted private-key entries.
pub const TAG: &[u8] = "arbiter/private-key/v1".as_bytes();

/// XChaCha20-Poly1305 uses 192-bit (24-byte) nonces.
pub const NONCE_LENGTH: usize = 24;

/// Big-endian counter nonce: `increment` carries from the last byte upward,
/// so comparing nonces as byte strings matches allocation order. Starts at
/// all-zero via `Default`.
#[derive(Default)]
pub struct Nonce([u8; NONCE_LENGTH]);
impl Nonce {
    /// Advances the counter by one.
    ///
    /// # Panics
    ///
    /// Panics if the counter would wrap back to zero. Wrapping would reuse a
    /// nonce under the same key — catastrophic for an AEAD — so we fail
    /// closed. (Previously the wrap was silent.) With a 192-bit counter this
    /// is unreachable in practice.
    pub fn increment(&mut self) {
        for i in (0..self.0.len()).rev() {
            if self.0[i] == 0xFF {
                self.0[i] = 0;
            } else {
                self.0[i] += 1;
                return;
            }
        }
        // Every byte was 0xFF: the counter wrapped to all-zero.
        panic!("nonce counter overflow: refusing to wrap and reuse a nonce");
    }

    /// Returns the nonce bytes as an owned vector (e.g. for DB storage).
    pub fn to_vec(&self) -> Vec<u8> {
        self.0.to_vec()
    }
}
impl<'a> TryFrom<&'a [u8]> for Nonce {
    type Error = ();

    /// Fails unless the slice is exactly `NONCE_LENGTH` bytes long.
    fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> {
        if value.len() != NONCE_LENGTH {
            return Err(());
        }
        let mut nonce = [0u8; NONCE_LENGTH];
        // `value` is already a slice; `&value` would trip clippy::needless_borrow
        // under the CI's `-D warnings`.
        nonce.copy_from_slice(value);
        Ok(Self(nonce))
    }
}
|
||||
|
||||
pub struct KeyCell(pub(super) MemSafe<Key>);
|
||||
impl From<MemSafe<Key>> for KeyCell {
|
||||
fn from(value: MemSafe<Key>) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
impl TryFrom<MemSafe<Vec<u8>>> for KeyCell {
|
||||
type Error = ();
|
||||
|
||||
fn try_from(mut value: MemSafe<Vec<u8>>) -> Result<Self, Self::Error> {
|
||||
let value = value.read().unwrap();
|
||||
if value.len() != size_of::<Key>() {
|
||||
return Err(());
|
||||
}
|
||||
let mut cell = MemSafe::new(Key::default()).unwrap();
|
||||
{
|
||||
let mut cell_write = cell.write().unwrap();
|
||||
let cell_slice: &mut [u8] = cell_write.as_mut();
|
||||
cell_slice.copy_from_slice(&value);
|
||||
}
|
||||
Ok(Self(cell))
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyCell {
|
||||
pub fn new_secure_random() -> Self {
|
||||
let mut key = MemSafe::new(Key::default()).unwrap();
|
||||
{
|
||||
let mut key_buffer = key.write().unwrap();
|
||||
let key_buffer: &mut [u8] = key_buffer.as_mut();
|
||||
|
||||
let mut rng = StdRng::try_from_rng(&mut SysRng).unwrap();
|
||||
rng.fill_bytes(key_buffer);
|
||||
}
|
||||
|
||||
key.into()
|
||||
}
|
||||
|
||||
pub fn into_inner(self) -> MemSafe<Key> {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn encrypt_in_place(
|
||||
&mut self,
|
||||
nonce: &Nonce,
|
||||
associated_data: &[u8],
|
||||
mut buffer: impl AsMut<Vec<u8>>,
|
||||
) -> Result<(), Error> {
|
||||
let key_reader = self.0.read().unwrap();
|
||||
let key_ref = key_reader.deref();
|
||||
let cipher = XChaCha20Poly1305::new(key_ref);
|
||||
let nonce = XNonce::from_slice(nonce.0.as_ref());
|
||||
let buffer = buffer.as_mut();
|
||||
cipher.encrypt_in_place(nonce, associated_data, buffer)
|
||||
}
|
||||
pub fn decrypt_in_place(
|
||||
&mut self,
|
||||
nonce: &Nonce,
|
||||
associated_data: &[u8],
|
||||
buffer: &mut MemSafe<Vec<u8>>,
|
||||
) -> Result<(), Error> {
|
||||
let key_reader = self.0.read().unwrap();
|
||||
let key_ref = key_reader.deref();
|
||||
let cipher = XChaCha20Poly1305::new(key_ref);
|
||||
let nonce = XNonce::from_slice(nonce.0.as_ref());
|
||||
let mut buffer = buffer.write().unwrap();
|
||||
let buffer: &mut Vec<u8> = buffer.as_mut();
|
||||
cipher.decrypt_in_place(nonce, associated_data, buffer)
|
||||
}
|
||||
|
||||
pub fn encrypt(
|
||||
&mut self,
|
||||
nonce: &Nonce,
|
||||
associated_data: &[u8],
|
||||
plaintext: impl AsRef<[u8]>,
|
||||
) -> Result<Vec<u8>, Error> {
|
||||
let key_reader = self.0.read().unwrap();
|
||||
let key_ref = key_reader.deref();
|
||||
let mut cipher = XChaCha20Poly1305::new(key_ref);
|
||||
let nonce = XNonce::from_slice(nonce.0.as_ref());
|
||||
|
||||
|
||||
let ciphertext = cipher.encrypt(
|
||||
&nonce,
|
||||
Payload {
|
||||
msg: plaintext.as_ref(),
|
||||
aad: associated_data,
|
||||
},
|
||||
)?;
|
||||
Ok(ciphertext)
|
||||
}
|
||||
}
|
||||
|
||||
pub type Salt = [u8; ArgonSalt::RECOMMENDED_LENGTH];
|
||||
|
||||
pub(super) fn generate_salt() -> Salt {
|
||||
let mut salt = Salt::default();
|
||||
let mut rng = StdRng::try_from_rng(&mut SysRng).unwrap();
|
||||
rng.fill_bytes(&mut salt);
|
||||
salt
|
||||
}
|
||||
|
||||
/// User password might be of different length, have not enough entropy, etc...
/// Derive a fixed-length key from the password using Argon2id, which is designed for password hashing and key derivation.
pub(super) fn derive_seal_key(mut password: MemSafe<Vec<u8>>, salt: &Salt) -> KeyCell {
    // Argon2id parameters: m_cost 262,144 KiB (256 MiB), 3 iterations, 4 lanes.
    // NOTE(review): confirm the 256 MiB memory cost fits the deployment's budget.
    let params = argon2::Params::new(262_144, 3, 4, None).unwrap();
    let hasher = Argon2::new(Algorithm::Argon2id, argon2::Version::V0x13, params);
    let mut key = MemSafe::new(Key::default()).unwrap();
    {
        // Scope the guards: the password is readable and the key writable
        // only for the duration of the hash.
        let password_source = password.read().unwrap();
        let mut key_buffer = key.write().unwrap();
        let key_buffer: &mut [u8] = key_buffer.as_mut();

        hasher
            .hash_password_into(password_source.deref(), salt, key_buffer)
            .unwrap();
    }

    key.into()
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use memsafe::MemSafe;

    /// The same password and salt must always derive the same key.
    #[test]
    pub fn derive_seal_key_deterministic() {
        static PASSWORD: &[u8] = b"password";
        let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
        let password2 = MemSafe::new(PASSWORD.to_vec()).unwrap();
        let salt = generate_salt();

        let mut key1 = derive_seal_key(password, &salt);
        let mut key2 = derive_seal_key(password2, &salt);

        let key1_reader = key1.0.read().unwrap();
        let key2_reader = key2.0.read().unwrap();

        assert_eq!(key1_reader.deref(), key2_reader.deref());
    }

    /// Derivation must actually fill the key buffer (not leave it all-zero).
    #[test]
    pub fn successful_derive() {
        static PASSWORD: &[u8] = b"password";
        let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
        let salt = generate_salt();

        let mut key = derive_seal_key(password, &salt);
        let key_reader = key.0.read().unwrap();
        let key_ref = key_reader.deref();

        assert_ne!(key_ref.as_slice(), &[0u8; 32][..]);
    }

    /// Encrypt-then-decrypt round-trips, and the ciphertext differs from the
    /// plaintext.
    #[test]
    pub fn encrypt_decrypt() {
        static PASSWORD: &[u8] = b"password";
        let password = MemSafe::new(PASSWORD.to_vec()).unwrap();
        let salt = generate_salt();

        let mut key = derive_seal_key(password, &salt);
        let nonce = Nonce(*b"unique nonce 123 1231233"); // 24 bytes for XChaCha20Poly1305
        let associated_data = b"associated data";
        let mut buffer = b"secret data".to_vec();

        key.encrypt_in_place(&nonce, associated_data, &mut buffer)
            .unwrap();
        assert_ne!(buffer, b"secret data");

        // Decrypt inside a MemSafe buffer, as production code does.
        let mut buffer = MemSafe::new(buffer).unwrap();

        key.decrypt_in_place(&nonce, associated_data, &mut buffer)
            .unwrap();

        let buffer = buffer.read().unwrap();
        assert_eq!(*buffer, b"secret data");
    }

    /// Incrementing from zero sets only the last (least-significant) byte.
    #[test]
    // We should fuzz this
    pub fn test_nonce_increment() {
        let mut nonce = Nonce([0u8; NONCE_LENGTH]);
        nonce.increment();

        assert_eq!(
            nonce.0,
            [
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1
            ]
        );
    }
}
|
||||
@@ -1,96 +1,58 @@
|
||||
use std::{
|
||||
ops::DerefMut,
|
||||
sync::Mutex,
|
||||
};
|
||||
|
||||
use arbiter_proto::proto::{
|
||||
UserAgentRequest, UserAgentResponse,
|
||||
UserAgentResponse,
|
||||
auth::{
|
||||
self, AuthChallenge, AuthChallengeRequest, AuthOk, ClientMessage,
|
||||
ServerMessage as AuthServerMessage, client_message::Payload as ClientAuthPayload,
|
||||
self, AuthChallengeRequest, AuthOk, ServerMessage as AuthServerMessage,
|
||||
server_message::Payload as ServerAuthPayload,
|
||||
},
|
||||
user_agent_request::Payload as UserAgentRequestPayload,
|
||||
unseal::{UnsealEncryptedKey, UnsealResult, UnsealStart, UnsealStartResponse},
|
||||
user_agent_response::Payload as UserAgentResponsePayload,
|
||||
};
|
||||
use chacha20poly1305::{
|
||||
AeadInPlace, XChaCha20Poly1305, XNonce,
|
||||
aead::KeyInit,
|
||||
};
|
||||
use diesel::{ExpressionMethods as _, OptionalExtension as _, QueryDsl, dsl::update};
|
||||
use diesel_async::{AsyncConnection, RunQueryDsl};
|
||||
use ed25519_dalek::VerifyingKey;
|
||||
use futures::StreamExt;
|
||||
use kameo::{
|
||||
Actor,
|
||||
actor::{ActorRef, Spawn},
|
||||
error::SendError,
|
||||
messages,
|
||||
prelude::Context,
|
||||
};
|
||||
use tokio::sync::mpsc;
|
||||
use kameo::{Actor, actor::ActorRef, messages};
|
||||
use memsafe::MemSafe;
|
||||
use tokio::sync::mpsc::Sender;
|
||||
use tonic::Status;
|
||||
use tracing::{error, info};
|
||||
use x25519_dalek::{EphemeralSecret, PublicKey};
|
||||
|
||||
use crate::{
|
||||
ServerContext,
|
||||
context::bootstrap::{BootstrapActor, ConsumeToken},
|
||||
actors::{
|
||||
bootstrap::{BootstrapActor, ConsumeToken},
|
||||
user_agent::state::{
|
||||
AuthRequestContext, ChallengeContext, DummyContext, UnsealContext, UserAgentEvents,
|
||||
UserAgentStateMachine, UserAgentStates,
|
||||
},
|
||||
},
|
||||
db::{self, schema},
|
||||
errors::GrpcStatusExt,
|
||||
};
|
||||
|
||||
/// Context for state machine with validated key and sent challenge
|
||||
/// Challenge is then transformed to bytes using shared function and verified
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ChallengeContext {
|
||||
challenge: AuthChallenge,
|
||||
key: VerifyingKey,
|
||||
}
|
||||
mod state;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
// Request context with deserialized public key for state machine.
|
||||
// This intermediate struct is needed because the state machine branches depending on presence of bootstrap token,
|
||||
// but we want to have the deserialized key in both branches.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AuthRequestContext {
|
||||
pubkey: VerifyingKey,
|
||||
bootstrap_token: Option<String>,
|
||||
}
|
||||
|
||||
smlang::statemachine!(
|
||||
name: UserAgent,
|
||||
derive_states: [Debug],
|
||||
custom_error: false,
|
||||
transitions: {
|
||||
*Init + AuthRequest(AuthRequestContext) / auth_request_context = ReceivedAuthRequest(AuthRequestContext),
|
||||
ReceivedAuthRequest(AuthRequestContext) + ReceivedBootstrapToken = Authenticated,
|
||||
|
||||
ReceivedAuthRequest(AuthRequestContext) + SentChallenge(ChallengeContext) / move_challenge = WaitingForChallengeSolution(ChallengeContext),
|
||||
|
||||
WaitingForChallengeSolution(ChallengeContext) + ReceivedGoodSolution = Authenticated,
|
||||
WaitingForChallengeSolution(ChallengeContext) + ReceivedBadSolution = AuthError, // block further transitions, but connection should close anyway
|
||||
}
|
||||
);
|
||||
|
||||
pub struct DummyContext;
|
||||
impl UserAgentStateMachineContext for DummyContext {
|
||||
#[allow(missing_docs)]
|
||||
#[allow(clippy::unused_unit)]
|
||||
fn move_challenge(
|
||||
&mut self,
|
||||
state_data: &AuthRequestContext,
|
||||
event_data: ChallengeContext,
|
||||
) -> Result<ChallengeContext, ()> {
|
||||
Ok(event_data)
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[allow(clippy::unused_unit)]
|
||||
fn auth_request_context(
|
||||
&mut self,
|
||||
event_data: AuthRequestContext,
|
||||
) -> Result<AuthRequestContext, ()> {
|
||||
Ok(event_data)
|
||||
}
|
||||
}
|
||||
mod transport;
|
||||
pub(crate) use transport::handle_user_agent;
|
||||
|
||||
#[derive(Actor)]
|
||||
pub struct UserAgentActor {
|
||||
db: db::DatabasePool,
|
||||
bootstapper: ActorRef<BootstrapActor>,
|
||||
state: UserAgentStateMachine<DummyContext>,
|
||||
tx: Sender<Result<UserAgentResponse, Status>>,
|
||||
// will be used in future
|
||||
_tx: Sender<Result<UserAgentResponse, Status>>,
|
||||
}
|
||||
|
||||
impl UserAgentActor {
|
||||
@@ -102,10 +64,11 @@ impl UserAgentActor {
|
||||
db: context.db.clone(),
|
||||
bootstapper: context.bootstrapper.clone(),
|
||||
state: UserAgentStateMachine::new(DummyContext),
|
||||
tx,
|
||||
_tx: tx,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn new_manual(
|
||||
db: db::DatabasePool,
|
||||
bootstapper: ActorRef<BootstrapActor>,
|
||||
@@ -115,7 +78,7 @@ impl UserAgentActor {
|
||||
db,
|
||||
bootstapper,
|
||||
state: UserAgentStateMachine::new(DummyContext),
|
||||
tx,
|
||||
_tx: tx,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -255,18 +218,93 @@ fn auth_response(payload: ServerAuthPayload) -> UserAgentResponse {
|
||||
}
|
||||
}
|
||||
|
||||
/// Wraps a payload into the `UserAgentResponse` envelope sent to the client.
fn unseal_response(payload: UserAgentResponsePayload) -> UserAgentResponse {
    UserAgentResponse {
        payload: Some(payload),
    }
}
|
||||
|
||||
#[messages]
|
||||
impl UserAgentActor {
|
||||
#[message(ctx)]
|
||||
pub async fn handle_auth_challenge_request(
|
||||
&mut self,
|
||||
req: AuthChallengeRequest,
|
||||
ctx: &mut Context<Self, Output>,
|
||||
) -> Output {
|
||||
/// Starts the unseal handshake: generates an ephemeral X25519 keypair,
/// validates and records the client's public key via a state-machine
/// transition, and returns the server's public key to the client.
#[message]
pub async fn handle_unseal_request(&mut self, req: UnsealStart) -> Output {
    let secret = EphemeralSecret::random();
    let public_key = PublicKey::from(&secret);

    // The wire format carries raw bytes; reject anything not exactly 32 long.
    let client_pubkey_bytes: [u8; 32] = req
        .client_pubkey
        .try_into()
        .map_err(|_| Status::invalid_argument("client_pubkey must be 32 bytes"))?;

    let client_public_key = PublicKey::from(client_pubkey_bytes);

    // The secret is stored as Mutex<Option<_>> because diffie_hellman later
    // consumes it by value.
    self.transition(UserAgentEvents::UnsealRequest(UnsealContext {
        server_public_key: public_key,
        secret: Mutex::new(Some(secret)),
        client_public_key,
    }))?;

    Ok(unseal_response(
        UserAgentResponsePayload::UnsealStartResponse(UnsealStartResponse {
            server_pubkey: public_key.as_bytes().to_vec(),
        }),
    ))
}
|
||||
|
||||
/// Completes the unseal handshake: derives the ECDH shared secret with the
/// stored ephemeral key and attempts to decrypt the client-supplied sealed
/// root key. Any failure reports `UnsealResult::InvalidKey` and transitions
/// the state machine accordingly.
#[message]
pub async fn handle_unseal_encrypted_key(&mut self, req: UnsealEncryptedKey) -> Output {
    let UserAgentStates::WaitingForUnsealKey(unseal_context) = self.state.state() else {
        error!("Received unseal encrypted key in invalid state");
        return Err(Status::failed_precondition(
            "Invalid state for unseal encrypted key",
        ));
    };
    // diffie_hellman consumes the secret by value, so it can only be taken
    // once per handshake; a second attempt is treated as an invalid key.
    let ephemeral_secret = {
        let mut secret_lock = unseal_context.secret.lock().unwrap();
        let secret = secret_lock.take();
        match secret {
            Some(secret) => secret,
            None => {
                drop(secret_lock);
                error!("Ephemeral secret already taken");
                self.transition(UserAgentEvents::ReceivedInvalidKey)?;
                return Ok(unseal_response(UserAgentResponsePayload::UnsealResult(
                    UnsealResult::InvalidKey.into(),
                )));
            }
        }
    };

    // NOTE(review): req.nonce length is not validated here; confirm that
    // XNonce::from_slice's behavior on wrong-length input is acceptable or
    // add an explicit length check.
    let nonce = XNonce::from_slice(&req.nonce);

    let shared_secret = ephemeral_secret.diffie_hellman(&unseal_context.client_public_key);
    let cipher = XChaCha20Poly1305::new(shared_secret.as_bytes().into());

    // Decrypt into locked memory so the root key never sits in plain heap.
    let mut root_key_buffer = MemSafe::new(req.ciphertext.clone()).unwrap();
    let mut write_handle = root_key_buffer.write().unwrap();
    let write_handle = write_handle.deref_mut();

    let decryption_result = cipher
        .decrypt_in_place(nonce, &req.associated_data, write_handle);

    match decryption_result {
        Ok(_) => todo!("Send key to the keyguarding"),
        Err(err) => {
            error!(?err, "Failed to decrypt unseal key");
            self.transition(UserAgentEvents::ReceivedInvalidKey)?;
            return Ok(unseal_response(UserAgentResponsePayload::UnsealResult(
                UnsealResult::InvalidKey.into(),
            )));
        },
    }
}
|
||||
|
||||
#[message]
|
||||
pub async fn handle_auth_challenge_request(&mut self, req: AuthChallengeRequest) -> Output {
|
||||
let pubkey = req.pubkey.as_array().ok_or(Status::invalid_argument(
|
||||
"Expected pubkey to have specific length",
|
||||
))?;
|
||||
let pubkey = VerifyingKey::from_bytes(pubkey).map_err(|err| {
|
||||
let pubkey = VerifyingKey::from_bytes(pubkey).map_err(|_err| {
|
||||
error!(?pubkey, "Failed to convert to VerifyingKey");
|
||||
Status::invalid_argument("Failed to convert pubkey to VerifyingKey")
|
||||
})?;
|
||||
@@ -282,11 +320,10 @@ impl UserAgentActor {
|
||||
}
|
||||
}
|
||||
|
||||
#[message(ctx)]
|
||||
#[message]
|
||||
pub async fn handle_auth_challenge_solution(
|
||||
&mut self,
|
||||
solution: auth::AuthChallengeSolution,
|
||||
ctx: &mut Context<Self, Output>,
|
||||
) -> Output {
|
||||
let (valid, challenge_context) = self.verify_challenge_solution(&solution)?;
|
||||
|
||||
@@ -304,66 +341,3 @@ impl UserAgentActor {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use arbiter_proto::proto::{
|
||||
UserAgentResponse, auth::{AuthChallengeRequest, AuthOk},
|
||||
user_agent_response::Payload as UserAgentResponsePayload,
|
||||
};
|
||||
use kameo::actor::Spawn;
|
||||
|
||||
use crate::{
|
||||
actors::user_agent::HandleAuthChallengeRequest, context::bootstrap::BootstrapActor, db,
|
||||
};
|
||||
|
||||
use super::UserAgentActor;
|
||||
|
||||
#[tokio::test]
|
||||
#[test_log::test]
|
||||
pub async fn test_bootstrap_token_auth() {
|
||||
let db = db::create_test_pool().await;
|
||||
// explicitly not installing any user_agent pubkeys
|
||||
let bootstrapper = BootstrapActor::new(&db).await.unwrap(); // this will create bootstrap token
|
||||
let token = bootstrapper.get_token().unwrap();
|
||||
|
||||
let bootstrapper_ref = BootstrapActor::spawn(bootstrapper);
|
||||
let user_agent = UserAgentActor::new_manual(
|
||||
db.clone(),
|
||||
bootstrapper_ref,
|
||||
tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
|
||||
);
|
||||
let user_agent_ref = UserAgentActor::spawn(user_agent);
|
||||
|
||||
// simulate client sending auth request with bootstrap token
|
||||
let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
|
||||
let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();
|
||||
|
||||
let result = user_agent_ref
|
||||
.ask(HandleAuthChallengeRequest {
|
||||
req: AuthChallengeRequest {
|
||||
pubkey: pubkey_bytes,
|
||||
bootstrap_token: Some(token),
|
||||
},
|
||||
})
|
||||
.await
|
||||
.expect("Shouldn't fail to send message");
|
||||
|
||||
// auth succeeded
|
||||
assert_eq!(
|
||||
result,
|
||||
UserAgentResponse {
|
||||
payload: Some(UserAgentResponsePayload::AuthMessage(
|
||||
arbiter_proto::proto::auth::ServerMessage {
|
||||
payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
|
||||
AuthOk {},
|
||||
)),
|
||||
},
|
||||
)),
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
mod transport;
|
||||
pub(crate) use transport::handle_user_agent;
|
||||
|
||||
76
server/crates/arbiter-server/src/actors/user_agent/state.rs
Normal file
76
server/crates/arbiter-server/src/actors/user_agent/state.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
use std::sync::Mutex;
|
||||
|
||||
use arbiter_proto::proto::auth::AuthChallenge;
|
||||
use ed25519_dalek::VerifyingKey;
|
||||
use x25519_dalek::{EphemeralSecret, PublicKey};
|
||||
|
||||
/// Context for state machine with validated key and sent challenge
/// Challenge is then transformed to bytes using shared function and verified
#[derive(Clone, Debug)]
pub struct ChallengeContext {
    pub challenge: AuthChallenge,
    pub key: VerifyingKey,
}

// Request context with deserialized public key for state machine.
// This intermediate struct is needed because the state machine branches depending on presence of bootstrap token,
// but we want to have the deserialized key in both branches.
#[derive(Clone, Debug)]
pub struct AuthRequestContext {
    pub pubkey: VerifyingKey,
    pub bootstrap_token: Option<String>,
}

/// Per-connection context for the unseal key exchange (X25519).
pub struct UnsealContext {
    // Server's ephemeral public key, sent back to the client.
    pub server_public_key: PublicKey,
    // Client's ephemeral public key from the unseal request.
    pub client_public_key: PublicKey,
    // Server's ephemeral secret. `EphemeralSecret::diffie_hellman` consumes
    // it by value, so it is stored as Mutex<Option<_>> and taken exactly once.
    pub secret: Mutex<Option<EphemeralSecret>>,
}
|
||||
|
||||
|
||||
|
||||
smlang::statemachine!(
    name: UserAgent,
    custom_error: false,
    transitions: {
        // Authentication phase: either a valid bootstrap token or a signed
        // challenge solution moves the connection to Idle.
        *Init + AuthRequest(AuthRequestContext) / auth_request_context = ReceivedAuthRequest(AuthRequestContext),
        ReceivedAuthRequest(AuthRequestContext) + ReceivedBootstrapToken = Idle,

        ReceivedAuthRequest(AuthRequestContext) + SentChallenge(ChallengeContext) / move_challenge = WaitingForChallengeSolution(ChallengeContext),

        WaitingForChallengeSolution(ChallengeContext) + ReceivedGoodSolution = Idle,
        WaitingForChallengeSolution(ChallengeContext) + ReceivedBadSolution = AuthError, // block further transitions, but connection should close anyway

        // Unseal handshake: only an authenticated (Idle) connection may start
        // it; an invalid key returns to Idle so the handshake can be retried.
        Idle + UnsealRequest(UnsealContext) / generate_temp_keypair = WaitingForUnsealKey(UnsealContext),
        WaitingForUnsealKey(UnsealContext) + ReceivedValidKey = Unsealed,
        WaitingForUnsealKey(UnsealContext) + ReceivedInvalidKey = Idle,
    }
);
|
||||
|
||||
/// No-op action context: smlang requires action callbacks for guarded
/// transitions, but all of ours just pass the event data through unchanged.
pub struct DummyContext;
impl UserAgentStateMachineContext for DummyContext {
    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn move_challenge(
        &mut self,
        _state_data: &AuthRequestContext,
        event_data: ChallengeContext,
    ) -> Result<ChallengeContext, ()> {
        // Carry the challenge into WaitingForChallengeSolution as-is.
        Ok(event_data)
    }

    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn auth_request_context(
        &mut self,
        event_data: AuthRequestContext,
    ) -> Result<AuthRequestContext, ()> {
        // Carry the parsed auth request into ReceivedAuthRequest as-is.
        Ok(event_data)
    }

    #[allow(missing_docs)]
    #[allow(clippy::unused_unit)]
    fn generate_temp_keypair(&mut self, event_data: UnsealContext) -> Result<UnsealContext, ()> {
        // The keypair is generated by the caller; just store the context.
        Ok(event_data)
    }
}
||||
199
server/crates/arbiter-server/src/actors/user_agent/tests.rs
Normal file
199
server/crates/arbiter-server/src/actors/user_agent/tests.rs
Normal file
@@ -0,0 +1,199 @@
|
||||
use arbiter_proto::proto::{
|
||||
UserAgentResponse,
|
||||
auth::{self, AuthChallengeRequest, AuthOk},
|
||||
user_agent_response::Payload as UserAgentResponsePayload,
|
||||
};
|
||||
use chrono::format;
|
||||
use diesel::{ExpressionMethods as _, QueryDsl, insert_into};
|
||||
use diesel_async::RunQueryDsl;
|
||||
use ed25519_dalek::Signer as _;
|
||||
use kameo::actor::Spawn;
|
||||
|
||||
use crate::{
|
||||
actors::{
|
||||
bootstrap::BootstrapActor,
|
||||
user_agent::{HandleAuthChallengeRequest, HandleAuthChallengeSolution},
|
||||
},
|
||||
db::{self, schema},
|
||||
};
|
||||
|
||||
use super::UserAgentActor;
|
||||
|
||||
/// A client presenting a valid bootstrap token must be authenticated and its
/// public key persisted for future challenge-response auth.
#[tokio::test]
#[test_log::test]
pub async fn test_bootstrap_token_auth() {
    let db = db::create_test_pool().await;
    // explicitly not installing any user_agent pubkeys
    let bootstrapper = BootstrapActor::new(&db).await.unwrap(); // this will create bootstrap token
    let token = bootstrapper.get_token().unwrap();

    let bootstrapper_ref = BootstrapActor::spawn(bootstrapper);
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);

    // simulate client sending auth request with bootstrap token
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();

    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: Some(token),
            },
        })
        .await
        .expect("Shouldn't fail to send message");

    // auth succeeded
    assert_eq!(
        result,
        UserAgentResponse {
            payload: Some(UserAgentResponsePayload::AuthMessage(
                arbiter_proto::proto::auth::ServerMessage {
                    payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
                        AuthOk {},
                    )),
                },
            )),
        }
    );

    // key is successfully recorded in database
    let mut conn = db.get().await.unwrap();
    let stored_pubkey: Vec<u8> = schema::useragent_client::table
        .select(schema::useragent_client::public_key)
        .first::<Vec<u8>>(&mut conn)
        .await
        .unwrap();
    assert_eq!(stored_pubkey, new_key.verifying_key().to_bytes().to_vec());
}
|
||||
|
||||
/// An invalid bootstrap token must be rejected with a gRPC `InvalidArgument`
/// status; no authentication takes place.
#[tokio::test]
#[test_log::test]
pub async fn test_bootstrap_invalid_token_auth() {
    let db = db::create_test_pool().await;
    // explicitly not installing any user_agent pubkeys
    let bootstrapper = BootstrapActor::new(&db).await.unwrap(); // this will create bootstrap token

    let bootstrapper_ref = BootstrapActor::spawn(bootstrapper);
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);

    // simulate client sending auth request with a token the server never issued
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();

    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: Some("invalid_token".to_string()),
            },
        })
        .await;

    match result {
        // the actor is expected to surface the rejection as a handler error
        Err(kameo::error::SendError::HandlerError(status)) => {
            assert_eq!(status.code(), tonic::Code::InvalidArgument);
            insta::assert_debug_snapshot!(status, @r#"
            Status {
                code: InvalidArgument,
                message: "Invalid bootstrap token",
                source: None,
            }
            "#);
        }
        Err(other) => {
            panic!("Expected SendError::HandlerError, got {other:?}");
        }
        Ok(_) => {
            panic!("Expected error due to invalid bootstrap token, but got success");
        }
    }
}
|
||||
|
||||
/// A client whose public key is already registered must authenticate via the
/// challenge-response flow: request -> challenge -> signed solution -> AuthOk.
#[tokio::test]
#[test_log::test]
pub async fn test_challenge_auth() {
    let db = db::create_test_pool().await;

    let bootstrapper_ref = BootstrapActor::spawn(BootstrapActor::new(&db).await.unwrap());
    let user_agent = UserAgentActor::new_manual(
        db.clone(),
        bootstrapper_ref,
        tokio::sync::mpsc::channel(1).0, // dummy channel, we won't actually send responses in this test
    );
    let user_agent_ref = UserAgentActor::spawn(user_agent);

    // generate a fresh client keypair for this test
    let new_key = ed25519_dalek::SigningKey::generate(&mut rand::rng());
    let pubkey_bytes = new_key.verifying_key().to_bytes().to_vec();

    // insert pubkey into database to trigger challenge-response auth flow
    {
        let mut conn = db.get().await.unwrap();
        insert_into(schema::useragent_client::table)
            .values(schema::useragent_client::public_key.eq(pubkey_bytes.clone()))
            .execute(&mut conn)
            .await
            .unwrap();
    }

    let result = user_agent_ref
        .ask(HandleAuthChallengeRequest {
            req: AuthChallengeRequest {
                pubkey: pubkey_bytes,
                bootstrap_token: None,
            },
        })
        .await
        .expect("Shouldn't fail to send message");

    // server must answer with a challenge rather than immediate AuthOk
    let UserAgentResponse {
        payload:
            Some(UserAgentResponsePayload::AuthMessage(arbiter_proto::proto::auth::ServerMessage {
                payload:
                    Some(arbiter_proto::proto::auth::server_message::Payload::AuthChallenge(challenge)),
            })),
    } = result
    else {
        panic!("Expected auth challenge response, got {result:?}");
    };

    // sign the canonical byte form of the challenge, as a real client would
    let formatted_challenge = arbiter_proto::format_challenge(&challenge);
    let signature = new_key.sign(&formatted_challenge);
    let serialized_signature = signature.to_bytes().to_vec();

    let result = user_agent_ref
        .ask(HandleAuthChallengeSolution {
            solution: auth::AuthChallengeSolution {
                signature: serialized_signature,
            },
        })
        .await
        .expect("Shouldn't fail to send message");

    // auth succeeded
    assert_eq!(
        result,
        UserAgentResponse {
            payload: Some(UserAgentResponsePayload::AuthMessage(
                arbiter_proto::proto::auth::ServerMessage {
                    payload: Some(arbiter_proto::proto::auth::server_message::Payload::AuthOk(
                        AuthOk {},
                    )),
                },
            )),
        }
    );
}
|
||||
@@ -2,12 +2,9 @@ use super::UserAgentActor;
|
||||
use arbiter_proto::proto::{
|
||||
UserAgentRequest, UserAgentResponse,
|
||||
auth::{
|
||||
self, AuthChallenge, AuthChallengeRequest, AuthOk, ClientMessage,
|
||||
ServerMessage as AuthServerMessage, client_message::Payload as ClientAuthPayload,
|
||||
server_message::Payload as ServerAuthPayload,
|
||||
ClientMessage as ClientAuthMessage, client_message::Payload as ClientAuthPayload,
|
||||
},
|
||||
user_agent_request::Payload as UserAgentRequestPayload,
|
||||
user_agent_response::Payload as UserAgentResponsePayload,
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use kameo::{
|
||||
@@ -19,7 +16,10 @@ use tonic::Status;
|
||||
use tracing::error;
|
||||
|
||||
use crate::{
|
||||
actors::user_agent::{HandleAuthChallengeRequest, HandleAuthChallengeSolution},
|
||||
actors::user_agent::{
|
||||
HandleAuthChallengeRequest, HandleAuthChallengeSolution, HandleUnsealEncryptedKey,
|
||||
HandleUnsealRequest,
|
||||
},
|
||||
context::ServerContext,
|
||||
};
|
||||
|
||||
@@ -59,28 +59,30 @@ async fn process_message(
|
||||
Status::invalid_argument("Expected message with payload")
|
||||
})?;
|
||||
|
||||
let UserAgentRequestPayload::AuthMessage(ClientMessage {
|
||||
payload: Some(client_message),
|
||||
}) = msg
|
||||
else {
|
||||
error!(
|
||||
actor = "useragent",
|
||||
"Received unexpected message type during authentication"
|
||||
);
|
||||
return Err(Status::invalid_argument(
|
||||
"Expected AuthMessage with ClientMessage payload",
|
||||
));
|
||||
};
|
||||
|
||||
match client_message {
|
||||
ClientAuthPayload::AuthChallengeRequest(req) => actor
|
||||
match msg {
|
||||
UserAgentRequestPayload::AuthMessage(ClientAuthMessage {
|
||||
payload: Some(ClientAuthPayload::AuthChallengeRequest(req)),
|
||||
}) => actor
|
||||
.ask(HandleAuthChallengeRequest { req })
|
||||
.await
|
||||
.map_err(into_status),
|
||||
ClientAuthPayload::AuthChallengeSolution(solution) => actor
|
||||
UserAgentRequestPayload::AuthMessage(ClientAuthMessage {
|
||||
payload: Some(ClientAuthPayload::AuthChallengeSolution(solution)),
|
||||
}) => actor
|
||||
.ask(HandleAuthChallengeSolution { solution })
|
||||
.await
|
||||
.map_err(into_status),
|
||||
UserAgentRequestPayload::UnsealStart(unseal_start) => actor
|
||||
.ask(HandleUnsealRequest { req: unseal_start })
|
||||
.await
|
||||
.map_err(into_status),
|
||||
UserAgentRequestPayload::UnsealEncryptedKey(unseal_encrypted_key) => actor
|
||||
.ask(HandleUnsealEncryptedKey {
|
||||
req: unseal_encrypted_key,
|
||||
})
|
||||
.await
|
||||
.map_err(into_status),
|
||||
_ => Err(Status::invalid_argument("Expected message with payload")),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ use std::sync::Arc;
|
||||
|
||||
use diesel::OptionalExtension as _;
|
||||
use diesel_async::RunQueryDsl as _;
|
||||
use ed25519_dalek::VerifyingKey;
|
||||
use kameo::actor::{ActorRef, Spawn};
|
||||
use miette::Diagnostic;
|
||||
use rand::rngs::StdRng;
|
||||
@@ -11,21 +10,15 @@ use thiserror::Error;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::{
|
||||
context::{
|
||||
bootstrap::{BootstrapActor, generate_token},
|
||||
lease::LeaseHandler,
|
||||
tls::{TlsDataRaw, TlsManager},
|
||||
},
|
||||
db::{
|
||||
actors::bootstrap::{self, BootstrapActor}, context::tls::{TlsDataRaw, TlsManager}, db::{
|
||||
self,
|
||||
models::ArbiterSetting,
|
||||
schema::{self, arbiter_settings},
|
||||
},
|
||||
schema::arbiter_settings,
|
||||
}
|
||||
};
|
||||
|
||||
pub(crate) mod bootstrap;
|
||||
pub(crate) mod lease;
|
||||
pub(crate) mod tls;
|
||||
|
||||
pub mod tls;
|
||||
|
||||
#[derive(Error, Debug, Diagnostic)]
|
||||
pub enum InitError {
|
||||
@@ -78,7 +71,7 @@ impl ServerStateMachineContext for _Context {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct _ServerContextInner {
|
||||
pub struct _ServerContextInner {
|
||||
pub db: db::DatabasePool,
|
||||
pub state: RwLock<ServerStateMachine<_Context>>,
|
||||
pub rng: StdRng,
|
||||
@@ -86,7 +79,7 @@ pub(crate) struct _ServerContextInner {
|
||||
pub bootstrapper: ActorRef<BootstrapActor>,
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct ServerContext(Arc<_ServerContextInner>);
|
||||
pub struct ServerContext(Arc<_ServerContextInner>);
|
||||
|
||||
impl std::ops::Deref for ServerContext {
|
||||
type Target = _ServerContextInner;
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use dashmap::DashSet;
|
||||
|
||||
/// Shared set of currently-leased items; clones share the same underlying set.
#[derive(Clone, Default)]
struct LeaseStorage<T: Eq + std::hash::Hash>(Arc<DashSet<T>>);
|
||||
|
||||
// A lease that automatically releases the item when dropped
pub struct Lease<T: Clone + std::hash::Hash + Eq> {
    /// The leased item; used as the removal key on drop.
    item: T,
    /// Handle to the shared set the item was inserted into.
    storage: LeaseStorage<T>,
}
impl<T: Clone + std::hash::Hash + Eq> Drop for Lease<T> {
    fn drop(&mut self) {
        // Removing the item from the shared set makes it acquirable again.
        self.storage.0.remove(&self.item);
    }
}
|
||||
|
||||
/// Hands out at most one [`Lease`] per distinct item at a time.
#[derive(Clone, Default)]
pub struct LeaseHandler<T: Clone + std::hash::Hash + Eq> {
    /// Set of items currently leased out; shared across handler clones.
    storage: LeaseStorage<T>,
}
|
||||
|
||||
impl<T: Clone + std::hash::Hash + Eq> LeaseHandler<T> {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
storage: LeaseStorage(Arc::new(DashSet::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn acquire(&self, item: T) -> Result<Lease<T>, ()> {
|
||||
if self.storage.0.insert(item.clone()) {
|
||||
Ok(Lease {
|
||||
item,
|
||||
storage: self.storage.clone(),
|
||||
})
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -60,7 +60,7 @@ fn generate_cert(key: &KeyPair) -> Result<Certificate, rcgen::Error> {
|
||||
}
|
||||
|
||||
// TODO: Implement cert rotation
|
||||
pub(crate) struct TlsManager {
|
||||
pub struct TlsManager {
|
||||
data: TlsData,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use diesel::{
|
||||
Connection as _, SqliteConnection,
|
||||
connection::{SimpleConnection as _, TransactionManager},
|
||||
connection::SimpleConnection as _,
|
||||
};
|
||||
use diesel_async::{
|
||||
AsyncConnection, SimpleAsyncConnection,
|
||||
pooled_connection::{AsyncDieselConnectionManager, ManagerConfig, RecyclingMethod},
|
||||
pooled_connection::{AsyncDieselConnectionManager, ManagerConfig},
|
||||
sync_connection_wrapper::SyncConnectionWrapper,
|
||||
};
|
||||
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
|
||||
|
||||
@@ -1,29 +1,56 @@
|
||||
#![allow(unused)]
|
||||
#![allow(clippy::all)]
|
||||
|
||||
use crate::db::schema::{self, aead_encrypted, arbiter_settings};
|
||||
use crate::db::schema::{self, aead_encrypted, arbiter_settings, root_key_history};
|
||||
use diesel::{prelude::*, sqlite::Sqlite};
|
||||
use restructed::Models;
|
||||
|
||||
pub mod types {
    use chrono::{DateTime, Utc};

    /// Newtype wrapper for a UTC timestamp as stored in SQLite.
    /// NOTE(review): not referenced by the visible models yet — confirm intended use.
    pub struct SqliteTimestamp(DateTime<Utc>);
}
|
||||
|
||||
#[derive(Queryable, Debug, Insertable)]
|
||||
#[derive(Models, Queryable, Debug, Insertable, Selectable)]
|
||||
#[view(
|
||||
NewAeadEncrypted,
|
||||
derive(Insertable),
|
||||
omit(id),
|
||||
attributes_with = "deriveless"
|
||||
)]
|
||||
#[diesel(table_name = aead_encrypted, check_for_backend(Sqlite))]
|
||||
pub struct AeadEncrypted {
|
||||
pub id: i32,
|
||||
pub ciphertext: Vec<u8>,
|
||||
pub tag: Vec<u8>,
|
||||
pub current_nonce: i32,
|
||||
pub current_nonce: Vec<u8>,
|
||||
pub schema_version: i32,
|
||||
pub associated_root_key_id: i32, // references root_key_history.id
|
||||
pub created_at: i32,
|
||||
}
|
||||
|
||||
/// One historical root-key record, stored encrypted at rest.
/// The `Models` derive generates `NewRootKeyHistory` (no `id`) for inserts.
#[derive(Models, Queryable, Debug, Insertable, Selectable)]
#[diesel(table_name = root_key_history, check_for_backend(Sqlite))]
#[view(
    NewRootKeyHistory,
    derive(Insertable),
    omit(id),
    attributes_with = "deriveless"
)]
pub struct RootKeyHistory {
    pub id: i32,
    /// Encrypted key material.
    pub ciphertext: Vec<u8>,
    /// AEAD authentication tag for `ciphertext`.
    pub tag: Vec<u8>,
    /// Nonce used when encrypting the root key itself.
    pub root_key_encryption_nonce: Vec<u8>,
    /// Nonce used for data encrypted under this root key.
    pub data_encryption_nonce: Vec<u8>,
    /// Version of this record's on-disk layout.
    pub schema_version: i32,
    // Salt — presumably input to a KDF deriving the wrapping key; confirm with
    // the unseal implementation.
    pub salt: Vec<u8>,
}
|
||||
|
||||
#[derive(Queryable, Debug, Insertable)]
|
||||
#[diesel(table_name = arbiter_settings, check_for_backend(Sqlite))]
|
||||
pub struct ArbiterSetting {
|
||||
pub id: i32,
|
||||
pub root_key_id: Option<i32>, // references aead_encrypted.id
|
||||
pub root_key_id: Option<i32>, // references root_key_history.id
|
||||
pub cert_key: Vec<u8>,
|
||||
pub cert: Vec<u8>,
|
||||
}
|
||||
|
||||
@@ -3,10 +3,12 @@
|
||||
diesel::table! {
|
||||
aead_encrypted (id) {
|
||||
id -> Integer,
|
||||
current_nonce -> Integer,
|
||||
current_nonce -> Binary,
|
||||
ciphertext -> Binary,
|
||||
tag -> Binary,
|
||||
schema_version -> Integer,
|
||||
associated_root_key_id -> Integer,
|
||||
created_at -> Integer,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,6 +31,18 @@ diesel::table! {
|
||||
}
|
||||
}
|
||||
|
||||
// History of encrypted root keys. `arbiter_settings.root_key_id` and
// `aead_encrypted.associated_root_key_id` reference rows of this table
// (see the `joinable!` declarations below).
diesel::table! {
    root_key_history (id) {
        id -> Integer,
        root_key_encryption_nonce -> Binary,
        data_encryption_nonce -> Binary,
        ciphertext -> Binary,
        tag -> Binary,
        schema_version -> Integer,
        salt -> Binary,
    }
}
|
||||
|
||||
diesel::table! {
|
||||
useragent_client (id) {
|
||||
id -> Integer,
|
||||
@@ -39,11 +53,13 @@ diesel::table! {
|
||||
}
|
||||
}
|
||||
|
||||
diesel::joinable!(arbiter_settings -> aead_encrypted (root_key_id));
|
||||
diesel::joinable!(aead_encrypted -> root_key_history (associated_root_key_id));
|
||||
diesel::joinable!(arbiter_settings -> root_key_history (root_key_id));
|
||||
|
||||
diesel::allow_tables_to_appear_in_same_query!(
|
||||
aead_encrypted,
|
||||
arbiter_settings,
|
||||
program_client,
|
||||
root_key_history,
|
||||
useragent_client,
|
||||
);
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
#![allow(unused)]
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
#![forbid(unsafe_code)]
|
||||
use arbiter_proto::{
|
||||
proto::{ClientRequest, ClientResponse, UserAgentRequest, UserAgentResponse},
|
||||
transport::BiStream,
|
||||
@@ -18,8 +15,8 @@ use crate::{
|
||||
};
|
||||
|
||||
pub mod actors;
|
||||
mod context;
|
||||
mod db;
|
||||
pub mod context;
|
||||
pub mod db;
|
||||
mod errors;
|
||||
|
||||
const DEFAULT_CHANNEL_SIZE: usize = 1000;
|
||||
@@ -28,6 +25,12 @@ pub struct Server {
|
||||
context: ServerContext,
|
||||
}
|
||||
|
||||
impl Server {
    /// Wraps an initialized [`ServerContext`] in the gRPC service implementation.
    pub fn new(context: ServerContext) -> Self {
        Self { context }
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl arbiter_proto::proto::arbiter_service_server::ArbiterService for Server {
|
||||
type UserAgentStream = ReceiverStream<Result<UserAgentResponse, Status>>;
|
||||
|
||||
34
server/crates/arbiter-server/src/main.rs
Normal file
34
server/crates/arbiter-server/src/main.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use arbiter_proto::proto::arbiter_service_server::ArbiterServiceServer;
|
||||
use arbiter_server::{Server, context::ServerContext, db};
|
||||
use tracing::info;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> miette::Result<()> {
|
||||
tracing_subscriber::fmt()
|
||||
.with_env_filter(
|
||||
tracing_subscriber::EnvFilter::try_from_default_env()
|
||||
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info")),
|
||||
)
|
||||
.init();
|
||||
|
||||
info!("Starting arbiter server");
|
||||
|
||||
info!("Initializing database");
|
||||
let db = db::create_pool(None).await?;
|
||||
info!("Database ready");
|
||||
|
||||
info!("Initializing server context");
|
||||
let context = ServerContext::new(db).await?;
|
||||
info!("Server context ready");
|
||||
|
||||
let addr = "[::1]:50051".parse().expect("valid address");
|
||||
info!(%addr, "Starting gRPC server");
|
||||
|
||||
tonic::transport::Server::builder()
|
||||
.add_service(ArbiterServiceServer::new(Server::new(context)))
|
||||
.serve(addr)
|
||||
.await
|
||||
.map_err(|e| miette::miette!("gRPC server error: {e}"))?;
|
||||
|
||||
unreachable!("gRPC server should run indefinitely");
|
||||
}
|
||||
@@ -1,6 +1,11 @@
|
||||
|
||||
# cargo-vet audits file
|
||||
|
||||
[[audits.similar]]
|
||||
who = "hdbg <httpdebugger@protonmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "2.2.1"
|
||||
|
||||
[[audits.test-log]]
|
||||
who = "hdbg <httpdebugger@protonmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -11,6 +16,12 @@ who = "hdbg <httpdebugger@protonmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.2.18 -> 0.2.19"
|
||||
|
||||
[[trusted.cc]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 55123 # rust-lang-owner
|
||||
start = "2022-10-29"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.h2]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 359 # Sean McArthur (seanmonstar)
|
||||
@@ -29,6 +40,12 @@ user-id = 359 # Sean McArthur (seanmonstar)
|
||||
start = "2022-01-15"
|
||||
end = "2027-02-14"
|
||||
|
||||
[[trusted.libc]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 55123 # rust-lang-owner
|
||||
start = "2024-08-15"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.rustix]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 6825 # Dan Gohman (sunfishcode)
|
||||
@@ -46,3 +63,33 @@ criteria = "safe-to-deploy"
|
||||
user-id = 3618 # David Tolnay (dtolnay)
|
||||
start = "2019-03-01"
|
||||
end = "2027-02-14"
|
||||
|
||||
[[trusted.thread_local]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 2915 # Amanieu d'Antras (Amanieu)
|
||||
start = "2019-09-07"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.toml]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 6743 # Ed Page (epage)
|
||||
start = "2022-12-14"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.toml_parser]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 6743 # Ed Page (epage)
|
||||
start = "2025-07-08"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.tonic-build]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 10
|
||||
start = "2019-09-10"
|
||||
end = "2027-02-16"
|
||||
|
||||
[[trusted.windows-sys]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 64539 # Kenny Kerr (kennykerr)
|
||||
start = "2021-11-15"
|
||||
end = "2027-02-16"
|
||||
|
||||
@@ -13,6 +13,9 @@ url = "https://raw.githubusercontent.com/google/supply-chain/main/audits.toml"
|
||||
[imports.mozilla]
|
||||
url = "https://raw.githubusercontent.com/mozilla/supply-chain/main/audits.toml"
|
||||
|
||||
[imports.zcash]
|
||||
url = "https://raw.githubusercontent.com/zcash/rust-ecosystem/main/supply-chain/audits.toml"
|
||||
|
||||
[[exemptions.addr2line]]
|
||||
version = "0.25.1"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -41,10 +44,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.1.89"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.autocfg]]
|
||||
version = "1.5.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.aws-lc-rs]]
|
||||
version = "1.15.4"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -193,10 +192,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.2.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.dunce]]
|
||||
version = "1.0.5"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.dyn-clone]]
|
||||
version = "1.0.20"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -209,10 +204,6 @@ criteria = "safe-to-deploy"
|
||||
version = "3.0.0-pre.6"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.errno]]
|
||||
version = "0.3.14"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.fiat-crypto]]
|
||||
version = "0.3.0"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -261,10 +252,6 @@ criteria = "safe-to-deploy"
|
||||
version = "1.4.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.http-body]]
|
||||
version = "1.0.1"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.http-body-util]]
|
||||
version = "0.1.3"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -329,10 +316,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.19.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.libc]]
|
||||
version = "0.2.181"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.libsqlite3-sys]]
|
||||
version = "0.35.0"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -525,10 +508,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.1.27"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.rustc_version]]
|
||||
version = "0.4.1"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.rusticata-macros]]
|
||||
version = "4.1.0"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -545,10 +524,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.103.9"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.rustversion]]
|
||||
version = "1.0.22"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.scoped-futures]]
|
||||
version = "0.1.4"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -653,10 +628,6 @@ criteria = "safe-to-deploy"
|
||||
version = "2.0.18"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.thread_local]]
|
||||
version = "1.1.9"
|
||||
criteria = "safe-to-run"
|
||||
|
||||
[[exemptions.time]]
|
||||
version = "0.3.47"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -689,14 +660,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.7.18"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.toml]]
|
||||
version = "0.9.11+spec-1.1.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.toml_parser]]
|
||||
version = "1.0.6+spec-1.1.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.tonic]]
|
||||
version = "0.14.3"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -741,10 +704,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.3.22"
|
||||
criteria = "safe-to-run"
|
||||
|
||||
[[exemptions.try-lock]]
|
||||
version = "0.2.5"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.typenum]]
|
||||
version = "1.19.0"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -769,10 +728,6 @@ criteria = "safe-to-deploy"
|
||||
version = "1.20.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.want]]
|
||||
version = "0.3.1"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.wasi]]
|
||||
version = "0.11.1+wasi-snapshot-preview1"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -817,10 +772,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.59.3"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-link]]
|
||||
version = "0.2.1"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-result]]
|
||||
version = "0.4.1"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -829,18 +780,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.5.1"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-sys]]
|
||||
version = "0.52.0"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-sys]]
|
||||
version = "0.60.2"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-sys]]
|
||||
version = "0.61.2"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.windows-targets]]
|
||||
version = "0.52.6"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -925,10 +864,6 @@ criteria = "safe-to-deploy"
|
||||
version = "0.5.2"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.zeroize]]
|
||||
version = "1.8.2"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
[[exemptions.zmij]]
|
||||
version = "1.0.20"
|
||||
criteria = "safe-to-deploy"
|
||||
|
||||
@@ -41,6 +41,12 @@ user-id = 359
|
||||
user-login = "seanmonstar"
|
||||
user-name = "Sean McArthur"
|
||||
|
||||
[[publisher.libc]]
|
||||
version = "0.2.182"
|
||||
when = "2026-02-13"
|
||||
user-id = 55123
|
||||
user-login = "rust-lang-owner"
|
||||
|
||||
[[publisher.rustix]]
|
||||
version = "1.1.3"
|
||||
when = "2025-12-23"
|
||||
@@ -63,12 +69,33 @@ user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.syn]]
|
||||
version = "2.0.114"
|
||||
when = "2026-01-07"
|
||||
version = "2.0.115"
|
||||
when = "2026-02-12"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.thread_local]]
|
||||
version = "1.1.9"
|
||||
when = "2025-06-12"
|
||||
user-id = 2915
|
||||
user-login = "Amanieu"
|
||||
user-name = "Amanieu d'Antras"
|
||||
|
||||
[[publisher.toml]]
|
||||
version = "0.9.12+spec-1.1.0"
|
||||
when = "2026-02-10"
|
||||
user-id = 6743
|
||||
user-login = "epage"
|
||||
user-name = "Ed Page"
|
||||
|
||||
[[publisher.toml_parser]]
|
||||
version = "1.0.8+spec-1.1.0"
|
||||
when = "2026-02-12"
|
||||
user-id = 6743
|
||||
user-login = "epage"
|
||||
user-name = "Ed Page"
|
||||
|
||||
[[publisher.unicode-width]]
|
||||
version = "0.1.14"
|
||||
when = "2024-09-19"
|
||||
@@ -120,6 +147,34 @@ version = "0.244.0"
|
||||
when = "2026-01-06"
|
||||
trusted-publisher = "github:bytecodealliance/wasm-tools"
|
||||
|
||||
[[publisher.windows-sys]]
|
||||
version = "0.52.0"
|
||||
when = "2023-11-15"
|
||||
user-id = 64539
|
||||
user-login = "kennykerr"
|
||||
user-name = "Kenny Kerr"
|
||||
|
||||
[[publisher.windows-sys]]
|
||||
version = "0.59.0"
|
||||
when = "2024-07-30"
|
||||
user-id = 64539
|
||||
user-login = "kennykerr"
|
||||
user-name = "Kenny Kerr"
|
||||
|
||||
[[publisher.windows-sys]]
|
||||
version = "0.60.2"
|
||||
when = "2025-06-12"
|
||||
user-id = 64539
|
||||
user-login = "kennykerr"
|
||||
user-name = "Kenny Kerr"
|
||||
|
||||
[[publisher.windows-sys]]
|
||||
version = "0.61.2"
|
||||
when = "2025-10-06"
|
||||
user-id = 64539
|
||||
user-login = "kennykerr"
|
||||
user-name = "Kenny Kerr"
|
||||
|
||||
[[publisher.wit-bindgen]]
|
||||
version = "0.51.0"
|
||||
when = "2026-01-12"
|
||||
@@ -265,6 +320,12 @@ criteria = "safe-to-deploy"
|
||||
version = "1.1.2"
|
||||
notes = "Contains `unsafe` code but it's well-documented and scoped to what it's intended to be doing. Otherwise a well-focused and straightforward crate."
|
||||
|
||||
[[audits.bytecode-alliance.audits.cipher]]
|
||||
who = "Andrew Brown <andrew.brown@intel.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.4.4"
|
||||
notes = "Most unsafe is hidden by `inout` dependency; only remaining unsafe is raw-splitting a slice and an unreachable hint. Older versions of this regularly reach ~150k daily downloads."
|
||||
|
||||
[[audits.bytecode-alliance.audits.core-foundation-sys]]
|
||||
who = "Dan Gohman <dev@sunfishcode.online>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -279,6 +340,23 @@ who = "Nick Fitzgerald <fitzgen@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.2.4 -> 0.2.5"
|
||||
|
||||
[[audits.bytecode-alliance.audits.errno]]
|
||||
who = "Dan Gohman <dev@sunfishcode.online>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.3.0"
|
||||
notes = "This crate uses libc and windows-sys APIs to get and set the raw OS error value."
|
||||
|
||||
[[audits.bytecode-alliance.audits.errno]]
|
||||
who = "Dan Gohman <dev@sunfishcode.online>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.0 -> 0.3.1"
|
||||
notes = "Just a dependency version bump and a bug fix for redox"
|
||||
|
||||
[[audits.bytecode-alliance.audits.errno]]
|
||||
who = "Dan Gohman <dev@sunfishcode.online>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.9 -> 0.3.10"
|
||||
|
||||
[[audits.bytecode-alliance.audits.fastrand]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -385,11 +463,28 @@ criteria = "safe-to-deploy"
|
||||
delta = "0.4.1 -> 0.5.0"
|
||||
notes = "Minor changes for a `no_std` upgrade but otherwise everything looks as expected."
|
||||
|
||||
[[audits.bytecode-alliance.audits.http-body]]
|
||||
who = "Pat Hickey <phickey@fastly.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.0.0-rc.2"
|
||||
|
||||
[[audits.bytecode-alliance.audits.http-body]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.0-rc.2 -> 1.0.0"
|
||||
notes = "Only minor changes made for a stable release."
|
||||
|
||||
[[audits.bytecode-alliance.audits.iana-time-zone-haiku]]
|
||||
who = "Dan Gohman <dev@sunfishcode.online>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.1.2"
|
||||
|
||||
[[audits.bytecode-alliance.audits.inout]]
|
||||
who = "Andrew Brown <andrew.brown@intel.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.1.3"
|
||||
notes = "A part of RustCrypto/utils, this crate is designed to handle unsafe buffers and carefully documents the safety concerns throughout. Older versions of this tally up to ~130k daily downloads."
|
||||
|
||||
[[audits.bytecode-alliance.audits.leb128fmt]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -443,6 +538,24 @@ criteria = "safe-to-deploy"
|
||||
delta = "0.8.5 -> 0.8.9"
|
||||
notes = "No new unsafe code, just refactorings."
|
||||
|
||||
[[audits.bytecode-alliance.audits.nu-ansi-term]]
|
||||
who = "Pat Hickey <phickey@fastly.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.46.0"
|
||||
notes = "one use of unsafe to call windows specific api to get console handle."
|
||||
|
||||
[[audits.bytecode-alliance.audits.nu-ansi-term]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.46.0 -> 0.50.1"
|
||||
notes = "Lots of stylistic/rust-related chanegs, plus new features, but nothing out of the ordrinary."
|
||||
|
||||
[[audits.bytecode-alliance.audits.nu-ansi-term]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.50.1 -> 0.50.3"
|
||||
notes = "CI changes, Rust changes, nothing out of the ordinary."
|
||||
|
||||
[[audits.bytecode-alliance.audits.num-traits]]
|
||||
who = "Andrew Brown <andrew.brown@intel.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -537,12 +650,38 @@ criteria = "safe-to-run"
|
||||
delta = "0.2.16 -> 0.2.18"
|
||||
notes = "Standard macro changes, nothing out of place"
|
||||
|
||||
[[audits.bytecode-alliance.audits.tracing-log]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.1.3"
|
||||
notes = """
|
||||
This is a standard adapter between the `log` ecosystem and the `tracing`
|
||||
ecosystem. There's one `unsafe` block in this crate and it's well-scoped.
|
||||
"""
|
||||
|
||||
[[audits.bytecode-alliance.audits.tracing-log]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.1.3 -> 0.2.0"
|
||||
notes = "Nothing out of the ordinary, a typical major version update and nothing awry."
|
||||
|
||||
[[audits.bytecode-alliance.audits.try-lock]]
|
||||
who = "Pat Hickey <phickey@fastly.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.2.4"
|
||||
notes = "Implements a concurrency primitive with atomics, and is not obviously incorrect"
|
||||
|
||||
[[audits.bytecode-alliance.audits.vcpkg]]
|
||||
who = "Pat Hickey <phickey@fastly.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.2.15"
|
||||
notes = "no build.rs, no macros, no unsafe. It reads the filesystem and makes copies of DLLs into OUT_DIR."
|
||||
|
||||
[[audits.bytecode-alliance.audits.want]]
|
||||
who = "Pat Hickey <phickey@fastly.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.3.0"
|
||||
|
||||
[[audits.bytecode-alliance.audits.wasm-metadata]]
|
||||
who = "Alex Crichton <alex@alexcrichton.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -591,6 +730,13 @@ criteria = "safe-to-deploy"
|
||||
delta = "0.243.0 -> 0.244.0"
|
||||
notes = "The Bytecode Alliance is the author of this crate"
|
||||
|
||||
[[audits.google.audits.autocfg]]
|
||||
who = "Manish Goregaokar <manishearth@google.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.4.0"
|
||||
notes = "Contains no unsafe"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.base64]]
|
||||
who = "amarjotgill <amarjotgill@google.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -719,6 +865,89 @@ delta = "0.2.9 -> 0.2.13"
|
||||
notes = "Audited at https://fxrev.dev/946396"
|
||||
aggregated-from = "https://fuchsia.googlesource.com/fuchsia/+/refs/heads/main/third_party/rust_crates/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.proc-macro-error-attr]]
|
||||
who = "George Burgess IV <gbiv@google.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.0.4"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromiumos/third_party/rust_crates/+/refs/heads/main/cargo-vet/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rand_core]]
|
||||
who = "Lukasz Anforowicz <lukasza@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.6.4"
|
||||
notes = """
|
||||
For more detailed unsafe review notes please see https://crrev.com/c/6362797
|
||||
"""
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Lukasz Anforowicz <lukasza@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.0.14"
|
||||
notes = """
|
||||
Grepped for `-i cipher`, `-i crypto`, `'\bfs\b'``, `'\bnet\b'``, `'\bunsafe\b'``
|
||||
and there were no hits except for:
|
||||
|
||||
* Using trivially-safe `unsafe` in test code:
|
||||
|
||||
```
|
||||
tests/test_const.rs:unsafe fn _unsafe() {}
|
||||
tests/test_const.rs:const _UNSAFE: () = unsafe { _unsafe() };
|
||||
```
|
||||
|
||||
* Using `unsafe` in a string:
|
||||
|
||||
```
|
||||
src/constfn.rs: "unsafe" => Qualifiers::Unsafe,
|
||||
```
|
||||
|
||||
* Using `std::fs` in `build/build.rs` to write `${OUT_DIR}/version.expr`
|
||||
which is later read back via `include!` used in `src/lib.rs`.
|
||||
|
||||
Version `1.0.6` of this crate has been added to Chromium in
|
||||
https://source.chromium.org/chromium/chromium/src/+/28841c33c77833cc30b286f9ae24c97e7a8f4057
|
||||
"""
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Adrian Taylor <adetaylor@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.14 -> 1.0.15"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "danakj <danakj@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.15 -> 1.0.16"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Dustin J. Mitchell <djmitche@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.16 -> 1.0.17"
|
||||
notes = "Just updates windows compat"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Liza Burakova <liza@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.17 -> 1.0.18"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Dustin J. Mitchell <djmitche@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.18 -> 1.0.19"
|
||||
notes = "No unsafe, just doc changes"
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.rustversion]]
|
||||
who = "Daniel Cheng <dcheng@chromium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.19 -> 1.0.20"
|
||||
notes = "Only minor updates to documentation and the mock today used for testing."
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.smallvec]]
|
||||
who = "Manish Goregaokar <manishearth@google.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -736,6 +965,28 @@ Previously reviewed during security review and the audit is grandparented in.
|
||||
"""
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.strum]]
|
||||
who = "danakj@chromium.org"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.25.0"
|
||||
notes = """
|
||||
Reviewed in https://crrev.com/c/5171063
|
||||
|
||||
Previously reviewed during security review and the audit is grandparented in.
|
||||
"""
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.google.audits.strum_macros]]
|
||||
who = "danakj@chromium.org"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.25.3"
|
||||
notes = """
|
||||
Reviewed in https://crrev.com/c/5171063
|
||||
|
||||
Previously reviewed during security review and the audit is grandparented in.
|
||||
"""
|
||||
aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT"
|
||||
|
||||
[[audits.mozilla.wildcard-audits.core-foundation-sys]]
|
||||
who = "Bobby Holley <bobbyholley@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -812,6 +1063,12 @@ criteria = "safe-to-deploy"
|
||||
delta = "0.2.3 -> 0.2.4"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.errno]]
|
||||
who = "Mike Hommey <mh+mozilla@glandium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.1 -> 0.3.3"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.fastrand]]
|
||||
who = "Mike Hommey <mh+mozilla@glandium.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -929,6 +1186,16 @@ yet, but it's all valid. Otherwise it's a pretty simple crate.
|
||||
"""
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.rustc_version]]
|
||||
who = "Nika Layzell <nika@thelayzells.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.4.0"
|
||||
notes = """
|
||||
Use of powerful capabilities is limited to invoking `rustc -vV` to get version
|
||||
information for parsing version information.
|
||||
"""
|
||||
aggregated-from = "https://raw.githubusercontent.com/mozilla/cargo-vet/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.serde_spanned]]
|
||||
who = "Ben Dean-Kawamura <bdk@mozilla.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -955,6 +1222,12 @@ criteria = "safe-to-deploy"
|
||||
delta = "1.1.0 -> 1.3.0"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.similar]]
|
||||
who = "Nika Layzell <nika@thelayzells.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "2.2.1 -> 2.7.0"
|
||||
aggregated-from = "https://raw.githubusercontent.com/mozilla/cargo-vet/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.smallvec]]
|
||||
who = "Erich Gubler <erichdongubler@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -967,6 +1240,30 @@ criteria = "safe-to-deploy"
|
||||
delta = "0.10.0 -> 0.11.1"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.strum]]
|
||||
who = "Teodor Tanasoaia <ttanasoaia@mozilla.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.25.0 -> 0.26.3"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.strum]]
|
||||
who = "Erich Gubler <erichdongubler@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.26.3 -> 0.27.1"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.strum_macros]]
|
||||
who = "Teodor Tanasoaia <ttanasoaia@mozilla.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.25.3 -> 0.26.4"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.strum_macros]]
|
||||
who = "Erich Gubler <erichdongubler@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.26.4 -> 0.27.1"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.synstructure]]
|
||||
who = "Nika Layzell <nika@thelayzells.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
@@ -1038,3 +1335,153 @@ who = "Jan-Erik Rediger <jrediger@mozilla.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.1.5"
|
||||
aggregated-from = "https://raw.githubusercontent.com/mozilla/glean/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.windows-link]]
|
||||
who = "Mark Hammond <mhammond@skippinet.com.au>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "0.1.1"
|
||||
notes = "A microsoft crate allowing unsafe calls to windows apis."
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.windows-link]]
|
||||
who = "Erich Gubler <erichdongubler@gmail.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.1.1 -> 0.2.0"
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.mozilla.audits.zeroize]]
|
||||
who = "Benjamin Beurdouche <beurdouche@mozilla.com>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.8.1"
|
||||
notes = """
|
||||
This code DOES contain unsafe code required to internally call volatiles
|
||||
for deleting data. This is expected and documented behavior.
|
||||
"""
|
||||
aggregated-from = "https://hg.mozilla.org/mozilla-central/raw-file/tip/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.autocfg]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.4.0 -> 1.5.0"
|
||||
notes = "Filesystem change is to remove the generated LLVM IR output file after probing."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.dunce]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
version = "1.0.5"
|
||||
notes = """
|
||||
Does what it says on the tin. No `unsafe`, and the only IO is `std::fs::canonicalize`.
|
||||
Path and string handling looks plausibly correct.
|
||||
"""
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.errno]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.3 -> 0.3.8"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.errno]]
|
||||
who = "Daira-Emma Hopwood <daira@jacaranda.org>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.8 -> 0.3.9"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.errno]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.10 -> 0.3.11"
|
||||
notes = "The `__errno` location for vxworks and cygwin looks correct from a quick search."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.errno]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.11 -> 0.3.13"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.errno]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.13 -> 0.3.14"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.http-body]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.0 -> 1.0.1"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.inout]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.1.3 -> 0.1.4"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.rustc_version]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.4.0 -> 0.4.1"
|
||||
notes = "Changes to `Command` usage are to add support for `RUSTC_WRAPPER`."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.rustversion]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.20 -> 1.0.21"
|
||||
notes = "Build script change is to fix building with `-Zfmt-debug=none`."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.rustversion]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.0.21 -> 1.0.22"
|
||||
notes = "Changes to generated code are to prepend a clippy annotation."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.strum]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.27.1 -> 0.27.2"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.strum_macros]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.27.1 -> 0.27.2"
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.try-lock]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.2.4 -> 0.2.5"
|
||||
notes = "Bumps MSRV to remove unsafe code block."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.want]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.3.0 -> 0.3.1"
|
||||
notes = """
|
||||
Migrates to `try-lock 0.2.4` to replace some unsafe APIs that were not marked
|
||||
`unsafe` (but that were being used safely).
|
||||
"""
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/zcash/master/qa/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.windows-link]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "0.2.0 -> 0.2.1"
|
||||
notes = "No code changes at all."
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/librustzcash/main/supply-chain/audits.toml"
|
||||
|
||||
[[audits.zcash.audits.zeroize]]
|
||||
who = "Jack Grigg <jack@electriccoin.co>"
|
||||
criteria = "safe-to-deploy"
|
||||
delta = "1.8.1 -> 1.8.2"
|
||||
notes = """
|
||||
Changes to `unsafe` code are to alter how `core::mem::size_of` is named; no actual changes
|
||||
to the `unsafe` logic.
|
||||
"""
|
||||
aggregated-from = "https://raw.githubusercontent.com/zcash/wallet/main/supply-chain/audits.toml"
|
||||
|
||||
Reference in New Issue
Block a user