Merge branch 'development'

This commit is contained in:
Jonathan Krebs 2025-02-11 17:00:22 +01:00
commit 57f7e74d03
54 changed files with 2427 additions and 1049 deletions

View File

@ -2,21 +2,28 @@
# Additionally, lint the code before anything else to fail more quickly
stages:
- lint
- check
- build
- test
- release
- dockerify
default:
image: "rust:latest"
image: "registry.gitlab.com/fabinfra/rust-builder:latest"
tags:
- linux
- docker
- fabinfra
variables:
GIT_SUBMODULE_STRATEGY: recursive
CARGO_HOME: $CI_PROJECT_DIR/cargo
APT_CACHE_DIR: $CI_PROJECT_DIR/apt
FF_USE_FASTZIP: "true" # enable fastzip - a faster zip implementation that also supports level configuration.
ARTIFACT_COMPRESSION_LEVEL: fast # can also be set to fastest, fast, slow and slowest. If just enabling fastzip is not enough try setting this to fastest or fast.
CACHE_COMPRESSION_LEVEL: fastest # same as above, but for caches
TRANSFER_METER_FREQUENCY: 5s # will display transfer progress every 5 seconds for artifacts and remote caches.
# cache dependencies and build environment to speed up setup
cache:
@ -26,10 +33,6 @@ cache:
- cargo/
- target/
# install build dependencies
before_script:
- apt-get update -yqq
- apt-get install -o dir::cache::archives="$APT_CACHE_DIR" -yqq --no-install-recommends capnproto build-essential cmake clang libclang-dev jq
.lints:
stage: lint
@ -41,7 +44,6 @@ before_script:
lint:clippy:
extends: .lints
script:
- rustup component add clippy
- cargo clippy -V
- echo -e "\e[0Ksection_start:`date +%s`:clippy_output\r\e[0Kcargo clippy output"
- cargo clippy -- --no-deps
@ -51,15 +53,14 @@ lint:clippy:
lint:fmt:
extends: .lints
script:
- rustup component add rustfmt
- cargo fmt --version
- echo -e "\e[0Ksection_start:`date +%s`:rustfmt_output\r\e[0KChanges suggested by rustfmt"
- cargo fmt --check -- -v
- echo -e "\e[0Ksection_end:`date +%s`:rustfmt_output\r\e[0K"
# Check if the code builds on rust stable
stable:build:
stage: build
stable:check:
stage: check
only:
- main
- development
@ -70,9 +71,94 @@ stable:build:
- cargo check --verbose
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
# Check if the code builds on rust stable on armv7
stable:check:armhf:
stage: check
only:
- main
- development
- merge_requests
before_script:
- mkdir -p $CARGO_HOME
- cp cargo-cross-config $CARGO_HOME/config.toml
script:
- rustc +stable --version && cargo --version
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo check with target armv7-unknown-linux-gnueabihf"
- cargo check --verbose --target armv7-unknown-linux-gnueabihf
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
# Check if the code builds on rust stable on arm64
stable:check:arm64:
stage: check
only:
- main
- development
- merge_requests
before_script:
- mkdir -p $CARGO_HOME
- cp cargo-cross-config $CARGO_HOME/config.toml
script:
- rustc +stable --version && cargo --version
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo check with target aarch64-unknown-linux-gnu"
- cargo check --verbose --target aarch64-unknown-linux-gnu
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
# Build release binaries on rust stable for amd64
stable:build:amd64:
stage: build
only:
- main
- development
- merge_requests
script:
- rustc +stable --version && cargo --version
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo build with target x86_64-unknown-linux-gnu"
- cargo build --release --target x86_64-unknown-linux-gnu
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
artifacts:
paths:
- target/x86_64-unknown-linux-gnu/release/bffhd
# Build release binaries on rust stable for armv7
stable:build:armhf:
stage: build
only:
- main
- development
before_script:
- mkdir -p $CARGO_HOME
- cp cargo-cross-config $CARGO_HOME/config.toml
script:
- rustc +stable --version && cargo --version
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo build with target armv7-unknown-linux-gnueabihf"
- cargo build --release --target armv7-unknown-linux-gnueabihf
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
artifacts:
paths:
- target/armv7-unknown-linux-gnueabihf/release/bffhd
# Build release binaries on rust stable for arm64
stable:build:arm64:
stage: build
only:
- main
- development
before_script:
- mkdir -p $CARGO_HOME
- cp cargo-cross-config $CARGO_HOME/config.toml
script:
- rustc +stable --version && cargo --version
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo build with target aarch64-unknown-linux-gnu"
- cargo build --release --target aarch64-unknown-linux-gnu
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
artifacts:
paths:
- target/aarch64-unknown-linux-gnu/release/bffhd
stable:test:
stage: build
needs: ["stable:build"]
needs: ["stable:check"]
only:
- main
- development
@ -80,14 +166,12 @@ stable:test:
script:
- echo -e "\e[0Ksection_start:`date +%s`:build_output\r\e[0KOutput of cargo test --no-run"
- cargo test --verbose --no-run --workspace
- echo -e "\e[0Ksection_end:`date +%s`:build_output\r\e[0K"
- cargo install --root $CARGO_HOME cargo2junit
.tests:
stage: test
needs: ["stable:test"]
script:
- cargo test --workspace $TEST_TARGET -- -Z unstable-options --format json --report-time | $CARGO_HOME/bin/cargo2junit > report.xml
- cargo test --workspace $TEST_TARGET -- -Z unstable-options --format json --report-time | cargo2junit > report.xml
artifacts:
when: always
reports:
@ -114,6 +198,23 @@ unit test 3:3:
TEST_TARGET: "--examples"
extends: .tests
upload_binaries:
stage: release
image: curlimages/curl:latest
before_script: []
cache: []
dependencies:
- stable:build:amd64
- stable:build:armhf
- stable:build:arm64
script:
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file target/aarch64-unknown-linux-gnu/release/bffhd "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_arm64"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file target/x86_64-unknown-linux-gnu/release/bffhd "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_amd64"'
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file target/armv7-unknown-linux-gnueabihf/release/bffhd "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_arm"'
rules:
- if: $CI_COMMIT_TAG =~ "release/.*"
when: never
- if: $CI_COMMIT_BRANCH == "main"
release_prepare:
stage: release
@ -144,32 +245,106 @@ release_job:
name: "BFFH $VERSION"
description: "GitLab CI auto-created release"
tag_name: "release/$VERSION"
assets:
links:
- name: 'bffhd AMD64'
url: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_amd64"
- name: 'bffhd ARMv7'
url: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_arm"
- name: 'bffhd ARM64'
url: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/bffhd/${VERSION}/bffhd_${VERSION}_linux_arm64"
build:docker-releases:
stage: dockerify
image:
name: gcr.io/kaniko-project/executor:v1.6.0-debug
entrypoint: [""]
image: jdrouet/docker-with-buildx:latest
dependencies:
- stable:build:amd64
- stable:build:armhf
- stable:build:arm64
tags:
- linux
- docker
- fabinfra
variables:
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
TRIVY_NO_PROGRESS: "true"
TRIVY_CACHE_DIR: ".trivycache/"
services:
- docker:dind
before_script:
- ''
- export TRIVY_VERSION=$(wget -qO - "https://api.github.com/repos/aquasecurity/trivy/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/')
- echo $TRIVY_VERSION
- wget --no-verbose https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_Linux-64bit.tar.gz -O - | tar -zxvf -
script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- /kaniko/executor --force --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
- docker login $CI_REGISTRY -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD"
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name cibuilder --driver docker-container --use
- docker buildx ls
- docker buildx inspect --bootstrap
- docker buildx build --platform linux/arm/v7,linux/arm64,linux/amd64 -t $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG .
- docker buildx build --load --platform linux/amd64 -t $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG .
# Build report
- ./trivy image --exit-code 0 --format template --template "@contrib/gitlab.tpl" -o gl-container-scanning-report.json $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
# Print report
- ./trivy image --exit-code 0 --severity HIGH $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
# Fail on severe vulnerabilities
- ./trivy image --exit-code 1 --severity CRITICAL $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
cache:
paths:
- .trivycache/
artifacts:
reports:
container_scanning: gl-container-scanning-report.json
rules:
- if: $CI_COMMIT_TAG =~ "release/.*"
when: never
build:docker-development:
stage: dockerify
image:
name: gcr.io/kaniko-project/executor:v1.6.0-debug
entrypoint: [""]
image: jdrouet/docker-with-buildx:latest
dependencies:
- stable:build:amd64
- stable:build:armhf
- stable:build:arm64
tags:
- linux
- docker
- fabinfra
variables:
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
TRIVY_NO_PROGRESS: "true"
TRIVY_CACHE_DIR: ".trivycache/"
services:
- docker:dind
before_script:
- ''
- export TRIVY_VERSION=$(wget -qO - "https://api.github.com/repos/aquasecurity/trivy/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v([^"]+)".*/\1/')
- echo $TRIVY_VERSION
- wget --no-verbose https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_Linux-64bit.tar.gz -O - | tar -zxvf -
script:
- mkdir -p /kaniko/.docker
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
- /kaniko/executor --force --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE:dev-latest
- docker login $CI_REGISTRY -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD"
- docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- docker buildx create --name cibuilder --driver docker-container --use
- docker buildx ls
- docker buildx inspect --bootstrap
- docker buildx build --platform linux/arm/v7,linux/arm64,linux/amd64 -t $CI_REGISTRY_IMAGE:development .
- docker buildx build --load --platform linux/amd64 -t $CI_REGISTRY_IMAGE:development .
# Build report
- ./trivy image --exit-code 0 --format template --template "@contrib/gitlab.tpl" -o gl-container-scanning-report.json $CI_REGISTRY_IMAGE:development
# Print report
- ./trivy image --exit-code 0 --severity HIGH $CI_REGISTRY_IMAGE:development
# Fail on severe vulnerabilities
- ./trivy image --exit-code 1 --severity CRITICAL $CI_REGISTRY_IMAGE:development
- docker push $CI_REGISTRY_IMAGE:development
cache:
paths:
- .trivycache/
artifacts:
reports:
container_scanning: gl-container-scanning-report.json
only:
- development
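The cross jobs above work by copying a `cargo-cross-config` file into `$CARGO_HOME/config.toml` so Cargo knows which linker to use per target. A minimal local equivalent of the armhf/arm64 checks might look like the sketch below; the package and linker names are assumptions about a Debian-style host, and the repository's actual `cargo-cross-config` may differ.
```shell
# One-time setup: add the cross targets the CI checks against
rustup target add armv7-unknown-linux-gnueabihf aarch64-unknown-linux-gnu

# Install GNU cross toolchains (Debian/Ubuntu package names; adjust for your distro)
sudo apt-get install -y gcc-arm-linux-gnueabihf gcc-aarch64-linux-gnu

# Tell Cargo which linker to use per target, mirroring what cargo-cross-config does in CI
mkdir -p .cargo
cat > .cargo/config.toml <<'EOF'
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"

[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"
EOF

# Same checks the stable:check:armhf and stable:check:arm64 jobs run
cargo check --verbose --target armv7-unknown-linux-gnueabihf
cargo check --verbose --target aarch64-unknown-linux-gnu
```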

View File

@ -7,6 +7,18 @@ A changelog following the [keepachangelog.com/en/1.0.0](https://keepachangelog.c
* errors in actors are now logged as errors ([#84](https://gitlab.com/fabinfra/fabaccess/bffh/-/issues/84))
## 0.4.3 -- 2025-02-11
* Adds a binary version of the FabFire authentication protocol
* Adds commands to dump and restore the full database as a TOML text file (`--dump-db` and `--load-db`)
* allows compilation with current stable Rust (1.84)
- Attention: The database format still relies on Rust data layout, so when updating the compiler, the database must be transferred as a TOML dump (see the sketch below).
Therefore, the `rust-toolchain.toml` file pinning `rustc` to version `1.66` is still in place.
* resolves a crash (use after free) when disconnecting a client.
* resolves some compiler warnings
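A minimal sketch of the upgrade workflow implied by that note, assuming the flags take the dump file as a direct argument (the flag names come from the entry above; the path and invocation details are illustrative):
```shell
# Before switching compilers: export the database with the old binary
bffhd --dump-db /var/lib/bffh/db-backup.toml

# After rebuilding bffhd with the new toolchain: re-import the dump
bffhd --load-db /var/lib/bffh/db-backup.toml
```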
## 0.4.2 -- TODO
## 0.4.1 -- 2022-04-24
* Initial full implementation of the FabAccess 0.3 API, "Spigots of Berlin".

View File

@ -56,7 +56,7 @@ But before you open an issue in this repo for a feature request, please first ch
## Contributing Code
To help develop Diflouroborane you will need a Rust toolchain. I heavily recommend installing
To help develop Difluoroborane you will need a Rust toolchain. I heavily recommend installing
[rustup](https://rustup.rs) even if your distribution provides a recent enough rustc, simply because
it lets you easily switch between several versions of both stable and nightly compilers. It also
allows you to download the respective stdlib crate, giving you the option of an offline reference.
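As a quick example of why that matters here, rustup honours the repository's `rust-toolchain.toml` pin automatically, and trying the same build with current stable is one extra command (a sketch; nothing beyond standard rustup/cargo invocations is assumed):
```shell
# Inside the repository: rustup installs and uses the pinned toolchain automatically
rustup show

# Try a build with current stable without touching the pin
rustup toolchain install stable
cargo +stable check
```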

Cargo.lock (generated, 1354 lines changed)

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
[package]
name = "diflouroborane"
name = "difluoroborane"
version = "0.4.2"
authors = [ "dequbed <me@dequbed.space>"
, "Kai Jan Kriegel <kai@kjkriegel.de>"
@ -66,7 +66,7 @@ ptr_meta = "0.1"
rkyv_typename = "0.7"
rkyv_dyn = "0.7"
inventory = "0.1"
linkme = "0.2.10"
linkme = "0.3"
chrono = { version = "0.4", features = ["serde"] }
# Password hashing for internal users
@ -84,7 +84,8 @@ capnp = "0.14"
capnp-rpc = "0.14.1"
# API Authentication
desfire = "0.2.0-alpha1"
desfire = "0.2.0-alpha3"
hex = { version = "0.4.3", features = ["serde"] }
futures-signals = "0.3.22"
@ -112,10 +113,9 @@ rustls-native-certs = "0.6.1"
shadow-rs = "0.11"
[dependencies.rsasl]
git = "https://github.com/dequbed/rsasl.git"
rev = "0b5012d0"
version = "2.2.0"
default_features = false
features = ["unstable_custom_mechanism", "provider", "registry_static", "plain"]
features = ["unstable_custom_mechanism", "provider", "registry_static", "config_builder", "plain"]
[dev-dependencies]
futures-test = "0.3.16"

View File

@ -1,21 +1,23 @@
# Setup build image for multistage build
FROM rust:bullseye as builder
# install build deps
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -yqq --no-install-recommends capnproto
FROM --platform=$BUILDPLATFORM alpine:latest as copy
ARG TARGETPLATFORM
RUN case "$TARGETPLATFORM" in \
"linux/arm/v7") echo armv7-unknown-linux-gnueabihf > /rust_target.txt ;; \
"linux/arm/v6") echo arm-unknown-linux-gnueabihf > /rust_target.txt ;; \
"linux/arm64") echo aarch64-unknown-linux-gnu > /rust_target.txt ;; \
"linux/amd64") echo x86_64-unknown-linux-gnu > /rust_target.txt ;; \
*) exit 1 ;; \
esac
WORKDIR /usr/src/bffh
COPY . .
RUN cargo build --release
RUN cp target/$(cat /rust_target.txt)/release/bffhd ./bffhd.bin
# Setup deployable image
FROM debian:bullseye-slim
# Install runtime deps
#RUN apt-get update && apt-get upgrade -yqq
COPY --from=builder /usr/src/bffh/target/release/bffhd /usr/local/bin/bffhd
#COPY --from=builder /usr/src/bffh/examples/bffh.dhall /etc/diflouroborane.dhall
# RUN diflouroborane --print-default > /etc/diflouroborane.toml
FROM ubuntu:22.04
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -yqq --no-install-recommends python3 python3-pip
RUN pip3 install paho-mqtt
COPY --from=copy /usr/src/bffh/bffhd.bin /usr/local/bin/bffhd
VOLUME /etc/bffh/
VOLUME /var/lib/bffh/
VOLUME /usr/local/lib/bffh/adapters/
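Note that the rewritten Dockerfile no longer compiles anything itself: the `copy` stage just picks the pre-built `target/<triple>/release/bffhd` matching `$TARGETPLATFORM` out of the build context. A local multi-arch build therefore follows the same two steps as the CI, sketched below; the image tag is only an example, and buildx with QEMU binfmt support is assumed to be set up.
```shell
# 1. Produce the release binaries the copy stage expects to find
cargo build --release --target x86_64-unknown-linux-gnu
cargo build --release --target aarch64-unknown-linux-gnu

# 2. Assemble the multi-arch image from the pre-built binaries
docker buildx build --platform linux/amd64,linux/arm64 -t bffh:local .
```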

View File

@ -1,10 +1,10 @@
# Installation
Currently there are no distribution packages available.
However installation is reasonably straight-forward, since Diflouroborane compiles into a single
However installation is reasonably straight-forward, since Difluoroborane compiles into a single
mostly static binary with few dependencies.
At the moment only Linux is supported. If you managed to compile Diflouroborane please open an issue
At the moment only Linux is supported. If you managed to compile Difluoroborane please open an issue
outlining your steps or add a merge request expanding this part. Thanks!
## Requirements
@ -12,7 +12,7 @@ outlining your steps or add a merge request expanding this part. Thanks!
General requirements; scroll down for distribution-specific instructions
- GNU SASL (libgsasl).
* If you want to compile Diflouroborane from source you will potentially also need development
* If you want to compile Difluoroborane from source you will potentially also need development
headers
- capnproto
- rustc stable / nightly >= 1.48
@ -26,11 +26,12 @@ $ pacman -S gsasl rust capnproto
## Compiling from source
Diflouroborane uses Cargo, so compilation boils down to:
Difluoroborane uses Cargo, so compilation boils down to:
```shell
$ cargo build --release
```
See https://www.geeksforgeeks.org/how-to-install-rust-on-raspberry-pi/ for instructions on installing Rust on a Raspberry Pi or other Linux computer.
The compiled binary can then be found in `./target/release/bffhd`
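The resulting binary is self-contained; a quick smoke test is to ask it for its flags (a sketch, the exact option set depends on the version you built):
```shell
./target/release/bffhd --help
```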

View File

@ -1,8 +1,8 @@
# FabAccess Diflouroborane
# FabAccess Difluoroborane
Diflouroborane (shorter: BFFH, the chemical formula for Diflouroborane) is the server part of
Difluoroborane (shorter: BFFH, the chemical formula for Difluoroborane) is the server part of
FabAccess.
It provides a server-side implementation of the [FabAccess API](/fabinfra/fabaccess/fabaccess-api).
It provides a server-side implementation of the [FabAccess API](https://gitlab.com/fabinfra/fabaccess/fabaccess-api).
## What is this?
@ -13,8 +13,8 @@ to be used for all other things one would like to give exclusive access to even
dangerous or expensive to use (think 3D printers, smart lightbulbs, meeting rooms).
FabAccess uses a Client/Server architecture with a [Cap'n Proto](https://capnproto.org/) API. You
can find the API schema files over [in their own repository](/fabinfra/fabaccess/fabaccess-api).
The reference client is [Borepin](/fabinfra/fabaccess/borepin), written in C#/Xamarin to be able to
can find the API schema files over [in their own repository](https://gitlab.com/fabinfra/fabaccess/fabaccess-api).
The reference client is [Borepin](https://gitlab.com/fabinfra/fabaccess/borepin), written in C#/Xamarin to be able to
be ported to as many platforms as possible.

View File

@ -12,9 +12,10 @@ use std::future::Future;
use std::pin::Pin;
use miette::IntoDiagnostic;
use miette::Diagnostic;
use std::task::{Context, Poll};
use std::time::Duration;
use thiserror::Error;
use once_cell::sync::Lazy;
use rumqttc::ConnectReturnCode::Success;
@ -111,11 +112,33 @@ static ROOT_CERTS: Lazy<RootCertStore> = Lazy::new(|| {
store
});
pub fn load(executor: Executor, config: &Config, resources: ResourcesHandle) -> miette::Result<()> {
#[derive(Debug, Error, Diagnostic)]
pub enum ActorError {
#[error("failed to parse MQTT url")]
UrlParseError(
#[from]
#[source]
url::ParseError,
),
#[error("MQTT config is invalid")]
InvalidConfig,
#[error("MQTT connection failed")]
ConnectionError(
#[from]
#[source]
rumqttc::ConnectionError,
),
}
pub fn load(
executor: Executor,
config: &Config,
resources: ResourcesHandle,
) -> Result<(), ActorError> {
let span = tracing::info_span!("loading actors");
let _guard = span;
let mqtt_url = Url::parse(config.mqtt_url.as_str()).into_diagnostic()?;
let mqtt_url = Url::parse(config.mqtt_url.as_str())?;
let (transport, default_port) = match mqtt_url.scheme() {
"mqtts" | "ssl" => (
rumqttc::Transport::tls_with_config(
@ -132,12 +155,12 @@ pub fn load(executor: Executor, config: &Config, resources: ResourcesHandle) ->
scheme => {
tracing::error!(%scheme, "MQTT url uses invalid scheme");
miette::bail!("invalid config");
return Err(ActorError::InvalidConfig);
}
};
let host = mqtt_url.host_str().ok_or_else(|| {
tracing::error!("MQTT url must contain a hostname");
miette::miette!("invalid config")
ActorError::InvalidConfig
})?;
let port = mqtt_url.port().unwrap_or(default_port);
@ -168,7 +191,7 @@ pub fn load(executor: Executor, config: &Config, resources: ResourcesHandle) ->
}
Err(error) => {
tracing::error!(?error, "MQTT connection failed");
miette::bail!("mqtt connection failed")
return Err(ActorError::ConnectionError(error));
}
}

View File

@ -1,8 +1,10 @@
use miette::Diagnostic;
use once_cell::sync::OnceCell;
use std::fs::{File, OpenOptions};
use std::io;
use std::io::{LineWriter, Write};
use std::sync::Mutex;
use thiserror::Error;
use crate::Config;
use serde::{Deserialize, Serialize};
@ -23,8 +25,13 @@ pub struct AuditLogLine<'a> {
state: &'a str,
}
#[derive(Debug, Error, Diagnostic)]
#[error(transparent)]
#[repr(transparent)]
pub struct Error(#[from] pub io::Error);
impl AuditLog {
pub fn new(config: &Config) -> io::Result<&'static Self> {
pub fn new(config: &Config) -> Result<&'static Self, Error> {
AUDIT.get_or_try_init(|| {
tracing::debug!(path = %config.auditlog_path.display(), "Initializing audit log");
let fd = OpenOptions::new()

View File

@ -2,43 +2,37 @@ mod server;
pub use server::FabFire;
use rsasl::mechname::Mechname;
use rsasl::registry::{Mechanism, MECHANISMS};
use rsasl::session::Side;
use rsasl::registry::{Matches, Mechanism, Named, Side, MECHANISMS};
const MECHNAME: &'static Mechname = &Mechname::const_new_unchecked(b"X-FABFIRE");
#[linkme::distributed_slice(MECHANISMS)]
pub static FABFIRE: Mechanism = Mechanism {
mechanism: MECHNAME,
priority: 300,
// In this situation there's one struct for both sides, however you can just as well use
// different types than then have different `impl Authentication` instead of checking a value
// in self.
client: None,
server: Some(FabFire::new_server),
first: Side::Client,
};
pub static FABFIRE: Mechanism = Mechanism::build(
MECHNAME,
300,
None,
Some(FabFire::new_server),
Side::Client,
|_| Some(Matches::<Select>::name()),
|_| true,
);
use rsasl::property::{Property, PropertyDefinition, PropertyQ};
struct Select;
impl Named for Select {
fn mech() -> &'static Mechanism {
&FABFIRE
}
}
use rsasl::property::SizedProperty;
use std::marker::PhantomData;
// All Property types must implement Debug.
#[derive(Debug)]
// The `PhantomData` in the constructor is only used so external crates can't construct this type.
pub struct FabFireCardKey(PhantomData<()>);
impl PropertyQ for FabFireCardKey {
// This is the type stored for this property. This could also be the struct itself if you
// so choose
type Item = [u8; 16];
// You need to return the constant you define below here for things to work properly
fn property() -> Property {
FABFIRECARDKEY
}
impl SizedProperty<'_> for FabFireCardKey {
type Value = [u8; 16];
const DESCRIPTION: &'static str = "A AES128 key for a FabFire card";
}
// This const is used by your mechanism to query and by your users to set your property. It
// thus needs to be exported from your crate
pub const FABFIRECARDKEY: Property = Property::new(&PropertyDefinition::new(
// Short name, used in `Debug` output
"FabFireCardKey",
// A longer user-facing name used in `Display` output
"A AES128 key for a FabFire card",
));

View File

@ -2,16 +2,16 @@ use desfire::desfire::desfire::MAX_BYTES_PER_TRANSACTION;
use desfire::desfire::Desfire;
use desfire::error::Error as DesfireError;
use desfire::iso7816_4::apduresponse::APDUResponse;
use rsasl::error::{MechanismError, MechanismErrorKind, SASLError, SessionError};
use rsasl::mechanism::Authentication;
use rsasl::mechanism::{
Authentication, Demand, DemandReply, MechanismData, MechanismError, MechanismErrorKind,
Provider, State, ThisProvider,
};
use rsasl::prelude::{MessageSent, SASLConfig, SASLError, SessionError};
use rsasl::property::AuthId;
use rsasl::session::{SessionData, StepResult};
use rsasl::SASL;
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::fmt::{Debug, Display, Formatter};
use std::io::Write;
use std::sync::Arc;
use crate::authentication::fabfire::FabFireCardKey;
@ -63,6 +63,8 @@ impl Display for FabFireError {
}
}
impl std::error::Error for FabFireError {}
impl MechanismError for FabFireError {
fn kind(&self) -> MechanismErrorKind {
match self {
@ -89,6 +91,7 @@ struct CardInfo {
}
struct KeyInfo {
authid: String,
key_id: u8,
key: Box<[u8]>,
}
@ -99,6 +102,7 @@ struct AuthInfo {
iv: Vec<u8>,
}
#[allow(non_camel_case_types)]
#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "Cmd")]
enum CardCommand {
@ -154,7 +158,7 @@ pub struct FabFire {
const MAGIC: &'static str = "FABACCESS\0DESFIRE\01.0\0";
impl FabFire {
pub fn new_server(_sasl: &SASL) -> Result<Box<dyn Authentication>, SASLError> {
pub fn new_server(_sasl: &SASLConfig) -> Result<Box<dyn Authentication>, SASLError> {
Ok(Box::new(Self {
step: Step::New,
card_info: None,
@ -174,10 +178,10 @@ impl FabFire {
impl Authentication for FabFire {
fn step(
&mut self,
session: &mut SessionData,
session: &mut MechanismData<'_, '_>,
input: Option<&[u8]>,
writer: &mut dyn Write,
) -> StepResult {
) -> Result<State, SessionError> {
match self.step {
Step::New => {
tracing::trace!("Step: New");
@ -216,7 +220,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize APDUCommand: {:?}", e);
@ -282,7 +286,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize APDUCommand: {:?}", e);
@ -365,7 +369,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize APDUCommand: {:?}", e);
@ -452,7 +456,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize APDUCommand: {:?}", e);
@ -490,26 +494,20 @@ impl Authentication for FabFire {
Ok(_) => {
match apdu_response.body {
Some(data) => {
let token = String::from_utf8(data).unwrap();
session.set_property::<AuthId>(Arc::new(
token.trim_matches(char::from(0)).to_string(),
));
let key = match session.get_property_or_callback::<FabFireCardKey>()
{
Ok(Some(key)) => Box::from(key.as_slice()),
Ok(None) => {
tracing::error!("No keys on file for token");
return Err(FabFireError::InvalidCredentials(
"No keys on file for token".to_string(),
)
.into());
}
Err(e) => {
tracing::error!("Failed to get key: {:?}", e);
return Err(FabFireError::Session(e).into());
}
};
self.key_info = Some(KeyInfo { key_id: 0x01, key });
let authid = String::from_utf8(data)
.unwrap()
.trim_matches(char::from(0))
.to_string();
let prov = ThisProvider::<AuthId>::with(&authid);
let key = session
.need_with::<FabFireCardKey, _, _>(&prov, |key| {
Ok(Box::from(key.as_slice()))
})?;
self.key_info = Some(KeyInfo {
authid,
key_id: 0x01,
key,
});
}
None => {
tracing::error!("No data in response");
@ -546,7 +544,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize command: {:?}", e);
@ -616,7 +614,7 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
Ok(rsasl::session::Step::NeedsMore(Some(send_buf.len())))
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to serialize command: {:?}", e);
@ -691,9 +689,26 @@ impl Authentication for FabFire {
writer
.write_all(&send_buf)
.map_err(|e| SessionError::Io { source: e })?;
return Ok(rsasl::session::Step::Done(Some(
send_buf.len(),
)));
struct Prov<'a> {
authid: &'a str,
}
impl<'a> Provider<'a> for Prov<'a> {
fn provide(
&self,
req: &mut Demand<'a>,
) -> DemandReply<()>
{
req.provide_ref::<AuthId>(self.authid)?
.done()
}
}
let prov = Prov {
authid: &self.key_info.as_ref().unwrap().authid,
};
session.validate(&prov)?;
return Ok(State::Finished(MessageSent::Yes));
}
Err(e) => {
tracing::error!(
@ -722,6 +737,6 @@ impl Authentication for FabFire {
}
}
return Ok(rsasl::session::Step::Done(None));
return Ok(State::Finished(MessageSent::No));
}
}

View File

@ -0,0 +1,25 @@
mod server;
pub use server::FabFire;
use rsasl::mechname::Mechname;
use rsasl::registry::{Matches, Mechanism, Named, Side, MECHANISMS};
const MECHNAME: &'static Mechname = &Mechname::const_new_unchecked(b"X-FABFIRE-BIN");
#[linkme::distributed_slice(MECHANISMS)]
pub static FABFIRE: Mechanism = Mechanism::build(
MECHNAME,
300,
None,
Some(FabFire::new_server),
Side::Client,
|_| Some(Matches::<Select>::name()),
|_| true,
);
struct Select;
impl Named for Select {
fn mech() -> &'static Mechanism {
&FABFIRE
}
}

View File

@ -0,0 +1,532 @@
use desfire::desfire::desfire::MAX_BYTES_PER_TRANSACTION;
use desfire::desfire::Desfire;
use desfire::error::Error as DesfireError;
use desfire::iso7816_4::apduresponse::APDUResponse;
use rsasl::mechanism::{
Authentication, Demand, DemandReply, MechanismData, MechanismError, MechanismErrorKind,
Provider, State, ThisProvider,
};
use rsasl::prelude::{MessageSent, SASLConfig, SASLError, SessionError};
use rsasl::property::AuthId;
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::fmt::{Debug, Display, Formatter};
use std::io::Write;
use crate::authentication::fabfire::FabFireCardKey;
use crate::CONFIG;
enum FabFireError {
ParseError,
SerializationError,
DeserializationError(serde_json::Error),
CardError(DesfireError),
InvalidMagic(String),
InvalidToken(String),
InvalidURN(String),
InvalidCredentials(String),
Session(SessionError),
}
impl Debug for FabFireError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
FabFireError::ParseError => write!(f, "ParseError"),
FabFireError::SerializationError => write!(f, "SerializationError"),
FabFireError::DeserializationError(e) => write!(f, "DeserializationError: {}", e),
FabFireError::CardError(err) => write!(f, "CardError: {}", err),
FabFireError::InvalidMagic(magic) => write!(f, "InvalidMagic: {}", magic),
FabFireError::InvalidToken(token) => write!(f, "InvalidToken: {}", token),
FabFireError::InvalidURN(urn) => write!(f, "InvalidURN: {}", urn),
FabFireError::InvalidCredentials(credentials) => {
write!(f, "InvalidCredentials: {}", credentials)
}
FabFireError::Session(err) => write!(f, "Session: {}", err),
}
}
}
impl Display for FabFireError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
FabFireError::ParseError => write!(f, "ParseError"),
FabFireError::SerializationError => write!(f, "SerializationError"),
FabFireError::DeserializationError(e) => write!(f, "DeserializationError: {}", e),
FabFireError::CardError(err) => write!(f, "CardError: {}", err),
FabFireError::InvalidMagic(magic) => write!(f, "InvalidMagic: {}", magic),
FabFireError::InvalidToken(token) => write!(f, "InvalidToken: {}", token),
FabFireError::InvalidURN(urn) => write!(f, "InvalidURN: {}", urn),
FabFireError::InvalidCredentials(credentials) => {
write!(f, "InvalidCredentials: {}", credentials)
}
FabFireError::Session(err) => write!(f, "Session: {}", err),
}
}
}
impl std::error::Error for FabFireError {}
impl MechanismError for FabFireError {
fn kind(&self) -> MechanismErrorKind {
match self {
FabFireError::ParseError => MechanismErrorKind::Parse,
FabFireError::SerializationError => MechanismErrorKind::Protocol,
FabFireError::DeserializationError(_) => MechanismErrorKind::Parse,
FabFireError::CardError(_) => MechanismErrorKind::Protocol,
FabFireError::InvalidMagic(_) => MechanismErrorKind::Protocol,
FabFireError::InvalidToken(_) => MechanismErrorKind::Protocol,
FabFireError::InvalidURN(_) => MechanismErrorKind::Protocol,
FabFireError::InvalidCredentials(_) => MechanismErrorKind::Protocol,
FabFireError::Session(_) => MechanismErrorKind::Protocol,
}
}
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CardInfo {
#[serde(rename = "UID", with = "hex")]
uid: [u8; 7],
key_old: Option<Box<[u8]>>,
key_new: Option<Box<[u8]>>,
}
struct KeyInfo {
authid: String,
key_id: u8,
key: Box<[u8]>,
}
struct AuthInfo {
rnd_a: Vec<u8>,
rnd_b: Vec<u8>,
iv: Vec<u8>,
}
enum Step {
New,
SelectApp,
VerifyMagic,
GetURN,
GetToken,
Authenticate1,
Authenticate2,
}
pub struct FabFire {
step: Step,
card_info: Option<CardInfo>,
key_info: Option<KeyInfo>,
auth_info: Option<AuthInfo>,
app_id: u32,
local_urn: String,
desfire: Desfire,
}
const MAGIC: &'static str = "FABACCESS\0DESFIRE\01.0\0";
impl FabFire {
pub fn new_server(_sasl: &SASLConfig) -> Result<Box<dyn Authentication>, SASLError> {
let space = if let Some(space) = CONFIG.get().map(|c| c.spacename.as_str()) {
space
} else {
tracing::error!("No space configured");
"generic"
};
Ok(Box::new(Self {
step: Step::New,
card_info: None,
key_info: None,
auth_info: None,
app_id: 0x464142,
local_urn: format!("urn:fabaccess:lab:{space}"),
desfire: Desfire {
card: None,
session_key: None,
cbc_iv: None,
},
}))
}
}
impl Authentication for FabFire {
fn step(
&mut self,
session: &mut MechanismData<'_, '_>,
input: Option<&[u8]>,
writer: &mut dyn Write,
) -> Result<State, SessionError> {
match self.step {
Step::New => {
tracing::trace!("Step: New");
//receive card info (especially card UID) from reader
return match input {
None => Err(SessionError::InputDataRequired),
Some(_) => {
//select application
return match self.desfire.select_application_cmd(self.app_id) {
Ok(buf) => match Vec::<u8>::try_from(buf) {
Ok(data) => {
self.step = Step::SelectApp;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!(
"Failed to convert APDUCommand to Vec<u8>: {:?}",
e
);
return Err(FabFireError::SerializationError.into());
}
},
Err(e) => {
tracing::error!("Failed to generate APDUCommand: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
};
}
};
}
Step::SelectApp => {
tracing::trace!("Step: SelectApp");
// check that we successfully selected the application
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
apdu_response
.check()
.map_err(|e| FabFireError::CardError(e))?;
// request the contents of the file containing the magic string
const MAGIC_FILE_ID: u8 = 0x01;
return match self
.desfire
.read_data_chunk_cmd(MAGIC_FILE_ID, 0, MAGIC.len())
{
Ok(buf) => match Vec::<u8>::try_from(buf) {
Ok(data) => {
self.step = Step::VerifyMagic;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to convert APDUCommand to Vec<u8>: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
},
Err(e) => {
tracing::error!("Failed to generate APDUCommand: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
};
}
Step::VerifyMagic => {
tracing::trace!("Step: VerifyMagic");
// verify the magic string to determine that we have a valid fabfire card
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
match apdu_response.check() {
Ok(_) => {
match apdu_response.body {
Some(data) => {
if std::str::from_utf8(data.as_slice()) != Ok(MAGIC) {
tracing::error!("Invalid magic string");
return Err(FabFireError::ParseError.into());
}
}
None => {
tracing::error!("No data returned from card");
return Err(FabFireError::ParseError.into());
}
};
}
Err(e) => {
tracing::error!("Got invalid APDUResponse: {:?}", e);
return Err(FabFireError::ParseError.into());
}
}
// request the contents of the file containing the URN
const URN_FILE_ID: u8 = 0x02;
return match self.desfire.read_data_chunk_cmd(
URN_FILE_ID,
0,
self.local_urn.as_bytes().len(),
) {
// TODO: support urn longer than 47 Bytes
Ok(buf) => match Vec::<u8>::try_from(buf) {
Ok(data) => {
self.step = Step::GetURN;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to convert APDUCommand to Vec<u8>: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
},
Err(e) => {
tracing::error!("Failed to generate APDUCommand: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
};
}
Step::GetURN => {
tracing::trace!("Step: GetURN");
// parse the urn and match it to our local urn
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
match apdu_response.check() {
Ok(_) => {
match apdu_response.body {
Some(data) => {
let received_urn = String::from_utf8(data).unwrap();
if received_urn != self.local_urn {
tracing::error!(
"URN mismatch: {:?} != {:?}",
received_urn,
self.local_urn
);
return Err(FabFireError::ParseError.into());
}
}
None => {
tracing::error!("No data returned from card");
return Err(FabFireError::ParseError.into());
}
};
}
Err(e) => {
tracing::error!("Got invalid APDUResponse: {:?}", e);
return Err(FabFireError::ParseError.into());
}
}
// request the contents of the file containing the URN
const TOKEN_FILE_ID: u8 = 0x03;
return match self.desfire.read_data_chunk_cmd(
TOKEN_FILE_ID,
0,
MAX_BYTES_PER_TRANSACTION,
) {
// TODO: support data longer than 47 Bytes
Ok(buf) => match Vec::<u8>::try_from(buf) {
Ok(data) => {
self.step = Step::GetToken;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to convert APDUCommand to Vec<u8>: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
},
Err(e) => {
tracing::error!("Failed to generate APDUCommand: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
};
}
Step::GetToken => {
// println!("Step: GetToken");
// parse the token and select the appropriate user
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
match apdu_response.check() {
Ok(_) => {
match apdu_response.body {
Some(data) => {
let authid = String::from_utf8(data)
.unwrap()
.trim_matches(char::from(0))
.to_string();
let prov = ThisProvider::<AuthId>::with(&authid);
let key = session
.need_with::<FabFireCardKey, _, _>(&prov, |key| {
Ok(Box::from(key.as_slice()))
})?;
self.key_info = Some(KeyInfo {
authid,
key_id: 0x01,
key,
});
}
None => {
tracing::error!("No data in response");
return Err(FabFireError::ParseError.into());
}
};
}
Err(e) => {
tracing::error!("Failed to check response: {:?}", e);
return Err(FabFireError::ParseError.into());
}
}
return match self
.desfire
.authenticate_iso_aes_challenge_cmd(self.key_info.as_ref().unwrap().key_id)
{
Ok(buf) => match Vec::<u8>::try_from(buf) {
Ok(data) => {
self.step = Step::Authenticate1;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to convert to Vec<u8>: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
},
Err(e) => {
tracing::error!("Failed to create authenticate command: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
};
}
Step::Authenticate1 => {
tracing::trace!("Step: Authenticate1");
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
return match apdu_response.check() {
Ok(_) => {
match apdu_response.body {
Some(data) => {
let rnd_b_enc = data.as_slice();
//FIXME: This is ugly, we should find a better way to make the function testable
//TODO: Check if we need a CSPRNG here
let rnd_a: [u8; 16] = rand::random();
let (cmd_challenge_response, rnd_b, iv) = self
.desfire
.authenticate_iso_aes_response_cmd(
rnd_b_enc,
&*(self.key_info.as_ref().unwrap().key),
&rnd_a,
)
.unwrap();
self.auth_info = Some(AuthInfo {
rnd_a: Vec::<u8>::from(rnd_a),
rnd_b,
iv,
});
match Vec::<u8>::try_from(cmd_challenge_response) {
Ok(data) => {
self.step = Step::Authenticate2;
writer
.write_all(&data)
.map_err(|e| SessionError::Io { source: e })?;
Ok(State::Running)
}
Err(e) => {
tracing::error!("Failed to convert to Vec<u8>: {:?}", e);
return Err(FabFireError::SerializationError.into());
}
}
}
None => {
tracing::error!("Got invalid response: {:?}", apdu_response);
Err(FabFireError::ParseError.into())
}
}
}
Err(e) => {
tracing::error!("Failed to check response: {:?}", e);
Err(FabFireError::ParseError.into())
}
};
}
Step::Authenticate2 => {
// println!("Step: Authenticate2");
let apdu_response = match input {
Some(data) => APDUResponse::new(data),
None => return Err(SessionError::InputDataRequired),
};
match apdu_response.check() {
Ok(_) => {
match apdu_response.body {
Some(data) => match self.auth_info.as_ref() {
None => {
return Err(FabFireError::ParseError.into());
}
Some(auth_info) => {
if self
.desfire
.authenticate_iso_aes_verify(
data.as_slice(),
auth_info.rnd_a.as_slice(),
auth_info.rnd_b.as_slice(),
&*(self.key_info.as_ref().unwrap().key),
auth_info.iv.as_slice(),
)
.is_ok()
{
struct Prov<'a> {
authid: &'a str,
}
impl<'a> Provider<'a> for Prov<'a> {
fn provide(
&self,
req: &mut Demand<'a>,
) -> DemandReply<()>
{
req.provide_ref::<AuthId>(self.authid)?.done()
}
}
let prov = Prov {
authid: &self.key_info.as_ref().unwrap().authid,
};
session.validate(&prov)?;
return Ok(State::Finished(MessageSent::Yes));
}
}
},
None => {
tracing::error!("got empty response");
return Err(FabFireError::ParseError.into());
}
};
}
Err(_e) => {
tracing::error!("Got invalid response: {:?}", apdu_response);
return Err(
FabFireError::InvalidCredentials(format!("{}", apdu_response)).into(),
);
}
}
}
}
return Ok(State::Finished(MessageSent::No));
}
}

View File

@ -1,16 +1,17 @@
use crate::users::Users;
use miette::{Context, IntoDiagnostic};
use rsasl::error::SessionError;
use rsasl::mechname::Mechname;
use rsasl::property::{AuthId, Password};
use rsasl::session::{Session, SessionData};
use rsasl::validate::{validations, Validation};
use rsasl::{Property, SASL};
use miette::{IntoDiagnostic, WrapErr};
use rsasl::callback::{CallbackError, Context, Request, SessionCallback, SessionData};
use rsasl::mechanism::SessionError;
use rsasl::prelude::{Mechname, SASLConfig, SASLServer, Session, Validation};
use rsasl::property::{AuthId, AuthzId, Password};
use rsasl::validate::{Validate, ValidationError};
use std::sync::Arc;
use crate::authentication::fabfire::FabFireCardKey;
use crate::users::db::User;
mod fabfire;
mod fabfire_bin;
struct Callback {
users: Users,
@ -22,89 +23,102 @@ impl Callback {
Self { users, span }
}
}
impl rsasl::callback::Callback for Callback {
fn provide_prop(
impl SessionCallback for Callback {
fn callback(
&self,
session: &mut rsasl::session::SessionData,
property: Property,
_session_data: &SessionData,
context: &Context,
request: &mut Request,
) -> Result<(), SessionError> {
match property {
fabfire::FABFIRECARDKEY => {
let authcid = session.get_property_or_callback::<AuthId>()?;
let user = self
.users
.get_user(authcid.unwrap().as_ref())
.ok_or(SessionError::AuthenticationFailure)?;
if let Some(authid) = context.get_ref::<AuthId>() {
request.satisfy_with::<FabFireCardKey, _>(|| {
let user = self.users.get_user(authid).ok_or(CallbackError::NoValue)?;
let kv = user
.userdata
.kv
.get("cardkey")
.ok_or(SessionError::AuthenticationFailure)?;
let card_key = <[u8; 16]>::try_from(
hex::decode(kv).map_err(|_| SessionError::AuthenticationFailure)?,
)
.map_err(|_| SessionError::AuthenticationFailure)?;
session.set_property::<FabFireCardKey>(Arc::new(card_key));
.ok_or(CallbackError::NoValue)?;
let card_key =
<[u8; 16]>::try_from(hex::decode(kv).map_err(|_| CallbackError::NoValue)?)
.map_err(|_| CallbackError::NoValue)?;
Ok(card_key)
})?;
}
Ok(())
}
_ => Err(SessionError::NoProperty { property }),
}
}
fn validate(
&self,
session: &mut SessionData,
validation: Validation,
_mechanism: &Mechname,
) -> Result<(), SessionError> {
session_data: &SessionData,
context: &Context,
validate: &mut Validate<'_>,
) -> Result<(), ValidationError> {
let span = tracing::info_span!(parent: &self.span, "validate");
let _guard = span.enter();
match validation {
validations::SIMPLE => {
let authnid = session
.get_property::<AuthId>()
.ok_or(SessionError::no_property::<AuthId>())?;
tracing::debug!(authid=%authnid, "SIMPLE validation requested");
if validate.is::<V>() {
match session_data.mechanism().mechanism.as_str() {
"PLAIN" => {
let authcid = context
.get_ref::<AuthId>()
.ok_or(ValidationError::MissingRequiredProperty)?;
let authzid = context
.get_ref::<AuthzId>()
.ok_or(ValidationError::MissingRequiredProperty)?;
let password = context
.get_ref::<Password>()
.ok_or(ValidationError::MissingRequiredProperty)?;
if let Some(user) = self.users.get_user(authnid.as_str()) {
let passwd = session
.get_property::<Password>()
.ok_or(SessionError::no_property::<Password>())?;
if user
.check_password(passwd.as_bytes())
.map_err(|_e| SessionError::AuthenticationFailure)?
{
if !authzid.is_empty() {
return Ok(());
} else {
tracing::warn!(authid=%authnid, "AUTH FAILED: bad password");
}
} else {
tracing::warn!(authid=%authnid, "AUTH FAILED: no such user '{}'", authnid);
}
Err(SessionError::AuthenticationFailure)
if let Some(user) = self.users.get_user(authcid) {
match user.check_password(password) {
Ok(true) => validate.finalize::<V>(user),
Ok(false) => {
tracing::warn!(authid=%authcid, "AUTH FAILED: bad password");
}
_ => {
tracing::error!(?validation, "Unimplemented validation requested");
Err(SessionError::no_validate(validation))
Err(error) => {
tracing::warn!(authid=%authcid, "Bad DB entry: {}", error);
}
}
} else {
tracing::warn!(authid=%authcid, "AUTH FAILED: no such user");
}
}
"X-FABFIRE" | "X-FABFIRE-BIN" => {
let authcid = context
.get_ref::<AuthId>()
.ok_or(ValidationError::MissingRequiredProperty)?;
if let Some(user) = self.users.get_user(authcid) {
validate.finalize::<V>(user)
}
}
_ => {}
}
}
Ok(())
}
}
pub struct V;
impl Validation for V {
type Value = User;
}
#[derive(Clone)]
struct Inner {
rsasl: SASL,
rsasl: Arc<SASLConfig>,
}
impl Inner {
pub fn new(rsasl: SASL) -> Self {
pub fn new(rsasl: Arc<SASLConfig>) -> Self {
Self { rsasl }
}
}
#[derive(Clone)]
pub struct AuthenticationHandle {
inner: Arc<Inner>,
inner: Inner,
}
impl AuthenticationHandle {
@ -112,11 +126,13 @@ impl AuthenticationHandle {
let span = tracing::debug_span!("authentication");
let _guard = span.enter();
let mut rsasl = SASL::new();
rsasl.install_callback(Arc::new(Callback::new(userdb)));
let config = SASLConfig::builder()
.with_defaults()
.with_callback(Callback::new(userdb))
.unwrap();
let mechs: Vec<&'static str> = rsasl
.server_mech_list()
let mechs: Vec<&'static str> = SASLServer::<V>::new(config.clone())
.get_available()
.into_iter()
.map(|m| m.mechanism.as_str())
.collect();
@ -124,24 +140,18 @@ impl AuthenticationHandle {
tracing::debug!(?mechs, "available mechs");
Self {
inner: Arc::new(Inner::new(rsasl)),
inner: Inner::new(config),
}
}
pub fn start(&self, mechanism: &Mechname) -> miette::Result<Session> {
Ok(self
.inner
.rsasl
.server_start(mechanism)
pub fn start(&self, mechanism: &Mechname) -> miette::Result<Session<V>> {
Ok(SASLServer::new(self.inner.rsasl.clone())
.start_suggested(mechanism)
.into_diagnostic()
.wrap_err("Failed to start a SASL authentication with the given mechanism")?)
}
pub fn list_available_mechs(&self) -> impl IntoIterator<Item = &Mechname> {
self.inner
.rsasl
.server_mech_list()
.into_iter()
.map(|m| m.mechanism)
pub fn sess(&self) -> SASLServer<V> {
SASLServer::new(self.inner.rsasl.clone())
}
}

View File

@ -2,13 +2,13 @@ use capnp::capability::Promise;
use capnp::Error;
use capnp_rpc::pry;
use rsasl::mechname::Mechname;
use rsasl::property::AuthId;
use rsasl::session::{Session, Step};
use rsasl::prelude::State as SaslState;
use rsasl::prelude::{MessageSent, Session};
use std::fmt;
use std::fmt::{Formatter, Write};
use std::io::Cursor;
use tracing::Span;
use crate::authentication::V;
use crate::capnp::session::APISession;
use crate::session::SessionManager;
use api::authenticationsystem_capnp::authentication::{
@ -27,7 +27,7 @@ impl Authentication {
pub fn new(
parent: &Span,
mechanism: &Mechname, /* TODO: this is stored in session as well, get it out of there. */
session: Session,
session: Session<V>,
sessionmanager: SessionManager,
) -> Self {
let span = tracing::info_span!(
@ -92,7 +92,7 @@ enum State {
InvalidMechanism,
Finished,
Aborted,
Running(Session, SessionManager),
Running(Session<V>, SessionManager),
}
impl AuthenticationSystem for Authentication {
@ -113,7 +113,7 @@ impl AuthenticationSystem for Authentication {
f.write_char(')')
}
}
let mut response;
let response;
let mut builder = results.get();
if let State::Running(mut session, manager) =
@ -121,36 +121,35 @@ impl AuthenticationSystem for Authentication {
{
let data: &[u8] = pry!(pry!(params.get()).get_data());
let mut out = Cursor::new(Vec::new());
let mut out = Vec::new();
match session.step(Some(data), &mut out) {
Ok(Step::Done(data)) => {
Ok(SaslState::Finished(sent)) => {
self.state = State::Finished;
let uid = pry!(session.get_property::<AuthId>().ok_or_else(|| {
tracing::warn!("Authentication didn't provide an authid as required.");
capnp::Error::failed(
"Authentication didn't provide an authid as required".to_string(),
)
}));
let session = pry!(manager.open(&self.span, uid.as_ref()).ok_or_else(|| {
tracing::warn!(uid = uid.as_str(), "Failed to lookup the given user");
capnp::Error::failed("Failed to lookup the given user".to_string())
}));
if let Some(user) = session.validation() {
let session = manager.open(&self.span, user);
response = Response {
union_field: "successful",
};
let mut builder = builder.init_successful();
if data.is_some() {
builder.set_additional_data(out.into_inner().as_slice());
if sent == MessageSent::Yes {
builder.set_additional_data(out.as_slice());
}
APISession::build(session, builder)
} else {
let mut builder = builder.init_failed();
builder.set_code(ErrorCode::InvalidCredentials);
response = Response {
union_field: "error",
};
}
Ok(Step::NeedsMore(_)) => {
}
Ok(SaslState::Running) => {
self.state = State::Running(session, manager);
builder.set_challenge(out.into_inner().as_slice());
builder.set_challenge(out.as_slice());
response = Response {
union_field: "challenge",

View File

@ -95,9 +95,10 @@ impl bootstrap::Server for BootCap {
let builder = result.get();
let mechs: Vec<_> = self
.authentication
.list_available_mechs()
.sess()
.get_available()
.into_iter()
.map(|m| m.as_str())
.map(|m| m.mechanism.as_str())
.collect();
let mut mechbuilder = builder.init_mechs(mechs.len() as u32);
for (i, m) in mechs.iter().enumerate() {
@ -146,7 +147,7 @@ impl bootstrap::Server for BootCap {
tracing::trace!(params.mechanism = mechanism, "method call");
let mechname = Mechname::new(mechanism.as_bytes());
let mechname = Mechname::parse(mechanism.as_bytes());
let auth = if let Ok(mechname) = mechname {
if let Ok(session) = self.authentication.start(mechname) {
Authentication::new(&self.span, mechname, session, self.sessionmanager.clone())

View File

@ -211,7 +211,6 @@ impl ManageServer for Machine {
mut result: manage::GetMachineInfoExtendedResults,
) -> Promise<(), ::capnp::Error> {
let mut builder = result.get();
let user = User::new_self(self.session.clone());
User::build_optional(
&self.session,
self.resource.get_current_user(),

View File

@ -1,13 +1,15 @@
use async_net::TcpListener;
use miette::Diagnostic;
use thiserror::Error;
use async_net::TcpListener;
use capnp_rpc::rpc_twoparty_capnp::Side;
use capnp_rpc::twoparty::VatNetwork;
use capnp_rpc::RpcSystem;
use executor::prelude::{Executor, GroupId, SupervisionRegistry};
use executor::prelude::{Executor, SupervisionRegistry};
use futures_rustls::server::TlsStream;
use futures_rustls::TlsAcceptor;
use futures_util::stream::FuturesUnordered;
use futures_util::{stream, AsyncRead, AsyncWrite, FutureExt, StreamExt};
use futures_util::{stream, AsyncRead, AsyncWrite, StreamExt};
use std::future::Future;
use std::io;
@ -37,6 +39,10 @@ pub struct APIServer {
authentication: AuthenticationHandle,
}
#[derive(Debug, Error, Diagnostic)]
#[error("Reached Void error, this should not be possible")]
pub enum Error {}
impl APIServer {
pub fn new(
executor: Executor<'static>,
@ -60,7 +66,7 @@ impl APIServer {
acceptor: TlsAcceptor,
sessionmanager: SessionManager,
authentication: AuthenticationHandle,
) -> miette::Result<Self> {
) -> Result<Self, Error> {
let span = tracing::info_span!("binding API listen sockets");
let _guard = span.enter();

View File

@ -1,4 +1,3 @@
use crate::authorization::roles::Role;
use crate::Roles;
use api::permissionsystem_capnp::permission_system::info::{
GetRoleListParams, GetRoleListResults, Server as PermissionSystem,
@ -37,7 +36,7 @@ impl PermissionSystem for Permissions {
tracing::trace!("method call");
let roles = self.roles.list().collect::<Vec<&String>>();
let mut builder = results.get();
let builder = results.get();
let mut b = builder.init_role_list(roles.len() as u32);
for (i, role) in roles.into_iter().enumerate() {
let mut role_builder = b.reborrow().get(i as u32);

View File

@ -1,20 +1,38 @@
use crate::authorization::permissions::Permission;
use crate::session::SessionHandle;
use crate::users::{db, UserRef};
use crate::CONFIG;
use api::general_capnp::optional;
use api::user_capnp::user::{self, admin, info, manage};
use api::user_capnp::user::card_d_e_s_fire_e_v2::{
BindParams, BindResults, GenCardTokenParams, GenCardTokenResults, GetMetaInfoParams,
GetMetaInfoResults, GetSpaceInfoParams, GetSpaceInfoResults, GetTokenListParams,
GetTokenListResults, UnbindParams, UnbindResults,
};
use api::user_capnp::user::{self, admin, card_d_e_s_fire_e_v2, info, manage};
use capnp::capability::Promise;
use capnp::Error;
use capnp_rpc::pry;
use std::borrow::Cow;
use std::io::Write;
use uuid::Uuid;
const TARGET: &str = "bffh::api::user";
#[derive(Clone)]
pub struct User {
span: tracing::Span,
session: SessionHandle,
user: UserRef,
}
impl User {
pub fn new(session: SessionHandle, user: UserRef) -> Self {
Self { session, user }
let span = tracing::info_span!(target: TARGET, "User");
Self {
span,
session,
user,
}
}
pub fn new_self(session: SessionHandle) -> Self {
@ -55,6 +73,7 @@ impl User {
}
if session.has_perm(Permission::new("bffh.users.admin")) {
builder.set_admin(capnp_rpc::new_client(client.clone()));
builder.set_card_d_e_s_fire_e_v2(capnp_rpc::new_client(client));
}
}
}
@ -90,7 +109,7 @@ impl manage::Server for User {
if let Some(mut user) = self.session.users.get_user(uid) {
if let Ok(true) = user.check_password(old_pw.as_bytes()) {
user.set_pw(new_pw.as_bytes());
self.session.users.put_user(uid, &user);
pry!(self.session.users.put_user(uid, &user));
}
}
Promise::ok(())
@ -124,9 +143,9 @@ impl admin::Server for User {
// Only update if needed
if !target.userdata.roles.iter().any(|r| r.as_str() == rolename) {
target.userdata.roles.push(rolename.to_string());
self.session
pry!(self.session
.users
.put_user(self.user.get_username(), &target);
.put_user(self.user.get_username(), &target));
}
}
@ -149,9 +168,9 @@ impl admin::Server for User {
// Only update if needed
if target.userdata.roles.iter().any(|r| r.as_str() == rolename) {
target.userdata.roles.retain(|r| r.as_str() != rolename);
self.session
pry!(self.session
.users
.put_user(self.user.get_username(), &target);
.put_user(self.user.get_username(), &target));
}
}
@ -166,8 +185,218 @@ impl admin::Server for User {
let uid = self.user.get_username();
if let Some(mut user) = self.session.users.get_user(uid) {
user.set_pw(new_pw.as_bytes());
self.session.users.put_user(uid, &user);
pry!(self.session.users.put_user(uid, &user));
}
Promise::ok(())
}
}
impl card_d_e_s_fire_e_v2::Server for User {
fn get_token_list(
&mut self,
_: GetTokenListParams,
mut results: GetTokenListResults,
) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "get_token_list").entered();
tracing::trace!("method call");
// TODO: This only supports a single token per user
let user = pry!(self
.session
.users
.get_user(self.user.get_username())
.ok_or_else(|| Error::failed(format!(
"User API object with nonexisting user \"{}\"",
self.user.get_username()
))));
let tk = user
.userdata
.kv
.get("cardtoken")
.map(|ck| hex::decode(ck).ok())
.flatten()
.unwrap_or_else(|| {
tracing::debug!(user.id = &user.id, "no tokens stored");
Vec::new()
});
if !tk.is_empty() {
let b = results.get();
let mut lb = b.init_token_list(1);
lb.set(0, &tk[..]);
}
Promise::ok(())
}
fn bind(&mut self, params: BindParams, _: BindResults) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "bind").entered();
let params = pry!(params.get());
let card_key = pry!(params.get_auth_key());
let token = pry!(params.get_token());
let token: Cow<'_, str> = if let Ok(url) = std::str::from_utf8(token) {
Cow::Borrowed(url)
} else {
Cow::Owned(hex::encode(token))
};
tracing::trace!(
params.token = token.as_ref(),
params.auth_key = "<censored>",
"method call"
);
let card_key = hex::encode(card_key);
let mut user = pry!(self
.session
.users
.get_user(self.user.get_username())
.ok_or_else(|| Error::failed(format!(
"User API object with nonexisting user \"{}\"",
self.user.get_username()
))));
let prev_token = user.userdata.kv.get("cardtoken");
let prev_cardk = user.userdata.kv.get("cardkey");
match (prev_token, prev_cardk) {
(Some(prev_token), Some(prev_cardk))
if prev_token.as_str() == &token && prev_cardk.as_str() == card_key.as_str() =>
{
tracing::info!(
user.id, token = token.as_ref(),
"new token and card key are identical, skipping no-op"
);
return Promise::ok(());
},
(Some(prev_token), Some(_))
if prev_token.as_str() == token /* above guard means prev_cardk != card_key */ =>
{
tracing::warn!(
token = token.as_ref(),
"trying to overwrite card key for existing token, ignoring!"
);
return Promise::ok(());
},
(Some(prev_token), None) => tracing::warn!(
user.id, prev_token,
"token already set for user but no card key, setting new pair unconditionally!"
),
(None, Some(_)) => tracing::warn!(
user.id,
"card key already set for user but no token, setting new pair unconditionally!"
),
(Some(_), Some(_)) | (None, None) => tracing::debug!(
user.id, token = token.as_ref(),
"Adding new card key/token pair"
),
}
user.userdata
.kv
.insert("cardtoken".to_string(), token.to_string());
user.userdata.kv.insert("cardkey".to_string(), card_key);
pry!(self.session.users.put_user(self.user.get_username(), &user));
Promise::ok(())
}
fn unbind(&mut self, params: UnbindParams, _: UnbindResults) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "unbind").entered();
let params = pry!(params.get());
let token = pry!(params.get_token());
let token: Cow<'_, str> = if let Ok(url) = std::str::from_utf8(token) {
Cow::Borrowed(url)
} else {
Cow::Owned(hex::encode(token))
};
tracing::trace!(params.token = token.as_ref(), "method call");
let mut user = pry!(self
.session
.users
.get_user(self.user.get_username())
.ok_or_else(|| Error::failed(format!(
"User API object with nonexisting user \"{}\"",
self.user.get_username()
))));
if let Some(prev_token) = user.userdata.kv.get("cardtoken") {
if token.as_ref() == prev_token.as_str() {
tracing::debug!(
user.id,
token = token.as_ref(),
"removing card key/token pair"
);
user.userdata.kv.remove("cardtoken");
user.userdata.kv.remove("cardkey");
}
}
pry!(self.session.users.put_user(self.user.get_username(), &user));
Promise::ok(())
}
fn gen_card_token(
&mut self,
_: GenCardTokenParams,
mut results: GenCardTokenResults,
) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "gen_card_token").entered();
tracing::trace!("method call");
results.get().set_token(Uuid::new_v4().as_bytes());
Promise::ok(())
}
fn get_meta_info(
&mut self,
_: GetMetaInfoParams,
mut results: GetMetaInfoResults,
) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "get_meta_info").entered();
tracing::trace!("method call");
results.get().set_bytes(b"FABACCESS\x00DESFIRE\x001.0\x00");
Promise::ok(())
}
fn get_space_info(
&mut self,
_: GetSpaceInfoParams,
mut results: GetSpaceInfoResults,
) -> Promise<(), Error> {
let _guard = self.span.enter();
let _span = tracing::trace_span!(target: TARGET, "get_space_info").entered();
tracing::trace!("method call");
let space = if let Some(space) = CONFIG.get().map(|c| c.spacename.as_str()) {
space
} else {
return Promise::err(Error::failed("No space name configured".to_string()));
};
let url = if let Some(url) = CONFIG.get().map(|c| c.instanceurl.as_str()) {
url
} else {
return Promise::err(Error::failed("No instance url configured".to_string()));
};
let mut data = Vec::new();
write!(&mut data, "urn:fabaccess:lab:{space}\x00{url}").unwrap();
results.get().set_bytes(&data);
Promise::ok(())
}
}
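A worked example of the byte string get_space_info produces, assuming the sample values spacename = "examplespace" and instanceurl = "https://example.com" from the example Dhall config further down in this commit; this is an illustration of the format only:

use std::io::Write;

fn main() {
    // Assumed config values, taken from the sample bffh.dhall below.
    let space = "examplespace";
    let url = "https://example.com";
    // The payload is the space URN, a NUL separator, then the instance URL.
    let mut data = Vec::new();
    write!(&mut data, "urn:fabaccess:lab:{space}\x00{url}").unwrap();
    assert_eq!(
        &data[..],
        &b"urn:fabaccess:lab:examplespace\x00https://example.com"[..]
    );
}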

View File

@ -84,13 +84,13 @@ impl manage::Server for Users {
"method call"
);
let mut builder = result.get();
let builder = result.get();
if !username.is_empty() && !password.is_empty() {
if self.session.users.get_user(username).is_none() {
let user = db::User::new_with_plain_pw(username, password);
self.session.users.put_user(username, &user);
let mut builder = builder.init_successful();
pry!(self.session.users.put_user(username, &user));
let builder = builder.init_successful();
User::fill(&self.session, user, builder);
} else {
let mut builder = builder.init_failed();

View File

@ -1,8 +1,6 @@
use std::collections::HashMap;
use std::default::Default;
use std::error::Error;
use std::fmt::{Debug, Display};
use std::marker::PhantomData;
use std::fmt::Debug;
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
@ -12,7 +10,6 @@ use crate::authorization::roles::Role;
use crate::capnp::{Listen, TlsListen};
use crate::logging::LogConfig;
use miette::IntoDiagnostic;
use std::path::Path;
#[derive(Debug)]
@ -96,6 +93,10 @@ pub struct Config {
#[serde(default, skip)]
pub logging: LogConfig,
pub spacename: String,
pub instanceurl: String,
}
impl Config {
@ -164,6 +165,8 @@ impl Default for Config {
tlskeylog: None,
verbosity: 0,
logging: LogConfig::default(),
instanceurl: "".into(),
spacename: "".into(),
}
}
}

View File

@ -38,13 +38,15 @@ pub fn read(file: impl AsRef<Path>) -> Result<Config, ConfigError> {
if !path.is_file() {
return Err(ConfigError::NotAFile(path.to_string_lossy().to_string()));
}
let mut config = dhall::read_config_file(file)?;
for (envvar, value) in std::env::vars() {
match envvar.as_str() {
// Do things like this?
// "BFFH_LOG" => config.logging.filter = Some(value),
_ => {}
}
}
let config = dhall::read_config_file(file)?;
// TODO: configuration by environment variables?
// but rather in a separate function
// for (envvar, value) in std::env::vars() {
// match envvar.as_str() {
// // Do things like this?
// // "BFFH_LOG" => config.logging.filter = Some(value),
// _ => {}
// }
// }
Ok(config)
}

View File

@ -1,10 +1,13 @@
use thiserror::Error;
// for converting a database error into a failed promise
use capnp;
mod raw;
use miette::{Diagnostic, LabeledSpan, Severity, SourceCode};
use miette::{Diagnostic, Severity};
pub use raw::RawDB;
use std::fmt::{Debug, Display, Formatter};
use std::fmt::{Debug, Display};
mod typed;
pub use typed::{Adapter, AlignedAdapter, ArchivedValue, DB};
@ -13,9 +16,9 @@ pub type ErrorO = lmdb::Error;
pub type Result<T> = std::result::Result<T, Error>;
#[repr(transparent)]
#[derive(Debug, Error)]
#[derive(Clone, Debug, PartialEq, Eq, Error)]
#[error(transparent)]
#[repr(transparent)]
pub struct Error(#[from] lmdb::Error);
impl Diagnostic for Error {
@ -79,3 +82,9 @@ impl Diagnostic for Error {
None
}
}
impl From<Error> for capnp::Error {
fn from(dberr: Error) -> capnp::Error {
capnp::Error::failed(format!("database error: {}", dberr.to_string()))
}
}
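A minimal sketch of what this conversion enables (the function and its Result argument are hypothetical): capnp_rpc's pry! bails out with Promise::err(err.into()), so results carrying a db::Error can be unwrapped directly inside RPC methods, which is what the pry!(... put_user(...)) call sites above rely on.

use capnp::capability::Promise;
use capnp_rpc::pry;

// `lookup` stands in for any database call returning Result<_, db::Error>;
// `Error` here is the db Error type defined in this module.
fn respond(lookup: Result<u32, Error>) -> Promise<u32, capnp::Error> {
    // On Err, pry! returns Promise::err(e.into()), going through the From impl above.
    let value = pry!(lookup);
    Promise::ok(value)
}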

View File

@ -1,4 +1,3 @@
use super::Result;
use lmdb::{DatabaseFlags, Environment, RwTransaction, Transaction, WriteFlags};
#[derive(Debug, Clone)]

View File

@ -1,4 +1,4 @@
use miette::{Diagnostic, LabeledSpan, Severity, SourceCode};
use miette::{Diagnostic, Severity};
use std::error;
use std::fmt::{Display, Formatter};
use std::io;

View File

@ -5,14 +5,11 @@ use super::Initiator;
use crate::initiators::InitiatorCallbacks;
use crate::resources::modules::fabaccess::Status;
use crate::session::SessionHandle;
use crate::users::UserRef;
use async_io::Timer;
use futures_util::future::BoxFuture;
use futures_util::ready;
use lmdb::Stat;
use std::collections::HashMap;
use std::future::Future;
use std::mem;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
@ -64,10 +61,7 @@ impl Future for Dummy {
match &mut self.state {
DummyState::Empty => {
tracing::trace!("Dummy initiator is empty, initializing…");
mem::replace(
&mut self.state,
DummyState::Sleeping(Self::timer(), Some(Status::Free)),
);
self.state = DummyState::Sleeping(Self::timer(), Some(Status::Free));
}
DummyState::Sleeping(timer, next) => {
tracing::trace!("Sleep timer exists, polling it.");
@ -78,7 +72,7 @@ impl Future for Dummy {
let status = next.take().unwrap();
let f = self.flip(status);
mem::replace(&mut self.state, DummyState::Updating(f));
self.state = DummyState::Updating(f);
}
DummyState::Updating(f) => {
tracing::trace!("Update future exists, polling it .");
@ -87,10 +81,7 @@ impl Future for Dummy {
tracing::trace!("Update future completed, sleeping!");
mem::replace(
&mut self.state,
DummyState::Sleeping(Self::timer(), Some(next)),
);
self.state = DummyState::Sleeping(Self::timer(), Some(next));
}
}
}

View File

@ -3,22 +3,15 @@ use crate::initiators::process::Process;
use crate::resources::modules::fabaccess::Status;
use crate::session::SessionHandle;
use crate::{
AuthenticationHandle, Config, MachineState, Resource, ResourcesHandle, SessionManager,
AuthenticationHandle, Config, Resource, ResourcesHandle, SessionManager,
};
use async_compat::CompatExt;
use executor::prelude::Executor;
use futures_util::ready;
use miette::IntoDiagnostic;
use rumqttc::ConnectReturnCode::Success;
use rumqttc::{AsyncClient, ConnectionError, Event, Incoming, MqttOptions};
use std::collections::HashMap;
use std::fmt::Display;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::Duration;
use tracing::Span;
use url::Url;
mod dummy;
mod process;
@ -56,7 +49,7 @@ impl InitiatorCallbacks {
}
pub fn open_session(&self, uid: &str) -> Option<SessionHandle> {
self.sessions.open(&self.span, uid)
self.sessions.try_open(&self.span, uid)
}
}
@ -107,7 +100,7 @@ pub fn load(
config: &Config,
resources: ResourcesHandle,
sessions: SessionManager,
authentication: AuthenticationHandle,
_authentication: AuthenticationHandle,
) -> miette::Result<()> {
let span = tracing::info_span!("loading initiators");
let _guard = span.enter();

View File

@ -1,9 +1,9 @@
use super::Initiator;
use super::InitiatorCallbacks;
use crate::resources::state::State;
use crate::resources::modules::fabaccess::Status;
use crate::utils::linebuffer::LineBuffer;
use async_process::{Child, ChildStderr, ChildStdout, Command, Stdio};
use futures_lite::{ready, AsyncRead};
use futures_lite::AsyncRead;
use miette::{miette, IntoDiagnostic};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@ -11,7 +11,6 @@ use std::future::Future;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use crate::resources::modules::fabaccess::Status;
#[derive(Debug, Serialize, Deserialize)]
pub enum InputMessage {
@ -63,7 +62,12 @@ struct ProcessState {
impl ProcessState {
pub fn new(stdout: ChildStdout, stderr: ChildStderr, child: Child) -> Self {
Self { stdout, stderr, stderr_closed: false, child }
Self {
stdout,
stderr,
stderr_closed: false,
child,
}
}
fn try_process(&mut self, buffer: &[u8], callbacks: &mut InitiatorCallbacks) -> usize {
@ -100,7 +104,9 @@ impl ProcessState {
let InputMessage::SetState(status) = state;
callbacks.set_status(status);
}
Err(error) => tracing::warn!(%error, "process initiator did not send a valid line"),
Err(error) => {
tracing::warn!(%error, "process initiator did not send a valid line")
}
}
}
}
@ -110,7 +116,7 @@ impl ProcessState {
impl Future for Process {
type Output = ();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
if let Process {
state: Some(state),
buffer,

View File

@ -3,11 +3,14 @@
//#![warn(missing_docs)]
//#![warn(missing_crate_level_docs)]
//! Diflouroborane
//! Difluoroborane
//!
//! This is the capnp component of the FabAccess project.
//! The entry point of bffhd can be found in [bin/bffhd/main.rs](../bin/bffhd/main.rs)
use miette::{Diagnostic, IntoDiagnostic};
use thiserror::Error;
pub mod config;
/// Internal Databases build on top of LMDB, a mmap()'ed B-tree DB optimized for reads
@ -44,7 +47,6 @@ mod tls;
use std::sync::Arc;
use futures_util::{FutureExt, StreamExt};
use miette::{Context, IntoDiagnostic, Report};
use once_cell::sync::OnceCell;
use crate::audit::AuditLog;
@ -65,7 +67,9 @@ use lightproc::recoverable_handle::RecoverableHandle;
use signal_hook::consts::signal::*;
use tracing::Span;
pub struct Diflouroborane {
use std::collections::HashMap;
pub struct Difluoroborane {
config: Config,
executor: Executor<'static>,
pub statedb: StateDB,
@ -77,15 +81,72 @@ pub struct Diflouroborane {
pub static RESOURCES: OnceCell<ResourcesHandle> = OnceCell::new();
pub static CONFIG: OnceCell<Config> = OnceCell::new();
struct SignalHandlerErr;
impl error::Description for SignalHandlerErr {
const CODE: &'static str = "signals::new";
}
impl Diflouroborane {
#[derive(Debug, Error, Diagnostic)]
// TODO 0.5: #[non_exhaustive]
pub enum BFFHError {
#[error("DB operation failed")]
DBError(
#[from]
#[source]
db::Error,
),
#[error("failed to initialize global user store")]
UsersError(
#[from]
#[source]
users::Error,
),
#[error("failed to initialize state database")]
StateDBError(
#[from]
#[source]
resources::state::db::StateDBError,
),
#[error("audit log failed")]
AuditLogError(
#[from]
#[source]
audit::Error,
),
#[error("Failed to initialize signal handler")]
SignalsError(#[source] std::io::Error),
#[error("error in actor subsystem")]
ActorError(
#[from]
#[source]
actors::ActorError,
),
#[error("failed to initialize TLS config")]
TlsSetup(
#[from]
#[source]
tls::Error,
),
#[error("API handler failed")]
ApiError(
#[from]
#[source]
capnp::Error,
),
}
#[derive(serde::Serialize, serde::Deserialize)]
struct DatabaseDump {
users: HashMap<String, users::db::UserData>,
state: HashMap<String, resources::state::State>,
}
impl Difluoroborane {
pub fn setup() {}
pub fn new(config: Config) -> miette::Result<Self> {
pub fn new(config: Config) -> Result<Self, BFFHError> {
let mut server = logging::init(&config.logging);
let span = tracing::info_span!(
target: "bffh",
@ -121,9 +182,7 @@ impl Diflouroborane {
let users = Users::new(env.clone())?;
let roles = Roles::new(config.roles.clone());
let _audit_log = AuditLog::new(&config)
.into_diagnostic()
.wrap_err("Failed to initialize audit log")?;
let _audit_log = AuditLog::new(&config)?;
let resources = ResourcesHandle::new(config.machines.iter().map(|(id, desc)| {
Resource::new(Arc::new(resources::Inner::new(
@ -132,7 +191,8 @@ impl Diflouroborane {
desc.clone(),
)))
}));
RESOURCES.set(resources.clone());
RESOURCES.set(resources.clone()).unwrap();
CONFIG.set(config.clone()).unwrap();
Ok(Self {
config,
@ -145,10 +205,27 @@ impl Diflouroborane {
})
}
pub fn run(&mut self) -> miette::Result<()> {
pub fn dump_db(&mut self, file: &str) -> Result<(), miette::Error> {
let users = self.users.dump_map()?;
let state = self.statedb.dump_map()?;
let dump = DatabaseDump{users, state};
let data = toml::ser::to_vec(&dump).map_err(|e| miette::Error::msg(format!("Serializing database dump failed: {}", e)))?;
std::fs::write(file, &data).map_err(|e| miette::Error::msg(format!("writing database dump failed: {}", e)))?;
Ok(())
}
pub fn load_db(&mut self, file: &str) -> Result<(), miette::Error> {
let data = std::fs::read(file).into_diagnostic()?;
let dump: DatabaseDump = toml::de::from_slice(&data).into_diagnostic()?;
self.users.load_map(&dump.users)?;
self.statedb.load_map(&dump.state)?;
Ok(())
}
pub fn run(&mut self) -> Result<(), BFFHError> {
let _guard = self.span.enter();
let mut signals = signal_hook_async_std::Signals::new(&[SIGINT, SIGQUIT, SIGTERM])
.map_err(|ioerr| error::wrap::<SignalHandlerErr>(ioerr.into()))?;
.map_err(BFFHError::SignalsError)?;
let sessionmanager = SessionManager::new(self.users.clone(), self.roles.clone());
let authentication = AuthenticationHandle::new(self.users.clone());
@ -159,11 +236,12 @@ impl Diflouroborane {
self.resources.clone(),
sessionmanager.clone(),
authentication.clone(),
);
).expect("initializing initiators failed");
// TODO 0.5: error handling. Add variant to BFFHError
actors::load(self.executor.clone(), &self.config, self.resources.clone())?;
let tlsconfig = TlsConfig::new(self.config.tlskeylog.as_ref(), !self.config.is_quiet())
.into_diagnostic()?;
let tlsconfig = TlsConfig::new(self.config.tlskeylog.as_ref(), !self.config.is_quiet())?;
let acceptor = tlsconfig.make_tls_acceptor(&self.config.tlsconfig)?;
let apiserver = self.executor.run(APIServer::bind(
@ -179,13 +257,13 @@ impl Diflouroborane {
self.executor.spawn(apiserver.handle_until(rx));
let f = async {
let mut sig = None;
let mut sig;
while {
sig = signals.next().await;
sig.is_none()
} {}
tracing::info!(signal = %sig.unwrap(), "Received signal");
tx.send(());
_ = tx.send(()); // ignore result, as an Err means that the executor we want to stop has already stopped
};
self.executor.run(f);

View File

@ -2,16 +2,15 @@ use serde::{Deserialize, Serialize};
use std::path::Path;
use tracing_subscriber::fmt::format::Format;
use tracing_subscriber::prelude::*;
use tracing_subscriber::reload::Handle;
use tracing_subscriber::{reload, EnvFilter};
use tracing_subscriber::EnvFilter;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogConfig {
#[serde(default, skip_serializing_if = "Option::is_none")]
/// Log filter string in the tracing format `target[span{field=value}]=level`.
/// The level is optional and multiple filters can be combined with commas.
/// e.g. `warn,diflouroborane::actors=debug` will only print `WARN` and `ERROR` unless the
/// message is logged in a span below `diflouroborane::actors` (i.e. by an actor task) in
/// e.g. `warn,difluoroborane::actors=debug` will only print `WARN` and `ERROR` unless the
/// message is logged in a span below `difluoroborane::actors` (i.e. by an actor task) in
/// which case `DEBUG` and `INFO` will also be printed.
pub filter: Option<String>,
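A small sketch of how such a filter string might be applied, reusing the EnvFilter import already present in this file; the helper name and the "info" fallback are assumptions for illustration:

// Hypothetical helper: parse the configured filter string into an EnvFilter,
// falling back to plain "info" when nothing is configured or parsing fails.
fn build_env_filter(filter: Option<&str>) -> EnvFilter {
    filter
        .and_then(|directives| EnvFilter::try_new(directives).ok())
        .unwrap_or_else(|| EnvFilter::new("info"))
}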

View File

@ -85,10 +85,13 @@ impl Inner {
self.db.put(&self.id.as_bytes(), &state).unwrap();
tracing::trace!("Updated DB, sending update signal");
AUDIT
let res = AUDIT
.get()
.unwrap()
.log(self.id.as_str(), &format!("{}", state));
if let Err(e) = res {
tracing::error!("Writing to the audit log failed for {} {}: {e}", self.id.as_str(), state);
}
self.signal.set(state);
tracing::trace!("Sent update signal");
@ -161,7 +164,7 @@ impl Resource {
fn set_state(&self, state: MachineState) {
let mut serializer = AllocSerializer::<1024>::default();
serializer.serialize_value(&state);
serializer.serialize_value(&state).expect("serializing a MachineState should be infallible");
let archived = ArchivedValue::new(serializer.into_serializer().into_inner());
self.inner.set_state(archived)
}

View File

@ -3,7 +3,6 @@ use crate::utils::oid::ObjectIdentifier;
use once_cell::sync::Lazy;
use rkyv::{Archive, Archived, Deserialize, Infallible};
use std::fmt;
use std::fmt::Write;
use std::str::FromStr;
//use crate::oidvalue;

View File

@ -2,6 +2,7 @@ use crate::resources::Resource;
use std::collections::HashMap;
use std::sync::Arc;
#[derive(Debug)]
struct Inner {
id: HashMap<String, Resource>,
}
@ -19,7 +20,7 @@ impl Inner {
}
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct ResourcesHandle {
inner: Arc<Inner>,
}

View File

@ -1,12 +1,12 @@
use rkyv::ser::Serializer;
use rkyv::ser::serializers::AllocSerializer;
use thiserror::Error;
use crate::db;
use crate::db::{AlignedAdapter, ArchivedValue, RawDB, DB};
use lmdb::{DatabaseFlags, Environment, EnvironmentFlags, Transaction, WriteFlags};
use miette::{Diagnostic, LabeledSpan, Severity, SourceCode};
use std::any::TypeId;
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use miette::Diagnostic;
use std::fmt::Debug;
use std::{path::Path, sync::Arc};
use crate::resources::state::State;
@ -17,7 +17,7 @@ pub struct StateDB {
db: DB<AlignedAdapter<State>>,
}
#[derive(Debug, Error, Diagnostic)]
#[derive(Clone, Debug, PartialEq, Eq, Error, Diagnostic)]
pub enum StateDBError {
#[error("opening the state db environment failed")]
#[diagnostic(
@ -54,8 +54,8 @@ impl StateDB {
}
pub fn open_with_env(env: Arc<Environment>) -> Result<Self, StateDBError> {
let db = unsafe { RawDB::open(&env, Some("state")) };
let db = db.map_err(|e| StateDBError::Open(e.into()))?;
let db = RawDB::open(&env, Some("state"))
.map_err(|e| StateDBError::Open(e.into()))?;
Ok(Self::new(env, db))
}
@ -66,8 +66,8 @@ impl StateDB {
pub fn create_with_env(env: Arc<Environment>) -> Result<Self, StateDBError> {
let flags = DatabaseFlags::empty();
let db = unsafe { RawDB::create(&env, Some("state"), flags) };
let db = db.map_err(|e| StateDBError::Create(e.into()))?;
let db = RawDB::create(&env, Some("state"), flags)
.map_err(|e| StateDBError::Create(e.into()))?;
Ok(Self::new(env, db))
}
@ -99,6 +99,30 @@ impl StateDB {
self.db.put(&mut txn, key, val, flags)?;
Ok(txn.commit()?)
}
pub fn load_map(&self, map: &std::collections::HashMap<String, State>) -> miette::Result<()> {
use miette::IntoDiagnostic;
let mut txn = self.env.begin_rw_txn().into_diagnostic()?;
let flags = WriteFlags::empty();
for (key, val) in map {
let mut serializer = AllocSerializer::<1024>::default();
serializer.serialize_value(val).into_diagnostic()?;
let serialized = ArchivedValue::new(serializer.into_serializer().into_inner());
self.db.put(&mut txn, &key.as_bytes(), &serialized, flags)?;
}
txn.commit().into_diagnostic()?;
Ok(())
}
pub fn dump_map(&self) -> miette::Result<std::collections::HashMap<String, State>> {
let mut map = std::collections::HashMap::new();
for (key, val) in self.get_all(&self.begin_ro_txn()?)? {
let key_str = core::str::from_utf8(&key).map_err(|_e| miette::Error::msg("state key not UTF8"))?.to_string();
let val_state: State = rkyv::Deserialize::deserialize(val.as_ref(), &mut rkyv::Infallible).unwrap();
map.insert(key_str, val_state);
}
Ok(map)
}
}
#[cfg(test)]

View File

@ -1,5 +1,5 @@
use std::fmt::{Debug, Display, Formatter};
use std::{fmt, hash::Hasher};
use std::fmt;
use std::ops::Deref;

View File

@ -14,8 +14,6 @@ use inventory;
use rkyv::ser::{ScratchSpace, Serializer};
use serde::ser::SerializeMap;
use std::collections::HashMap;
use std::ops::Deref;
@ -275,10 +273,6 @@ pub struct ImplDebugInfo {
/// [statevalue_register](macro@crate::statevalue_register) macro with your OID as first and type
/// as second parameter like so:
///
/// ```no_run
/// struct MyStruct;
/// statevalue_register!(ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.14").unwrap(), MyStruct)
/// ```
pub struct ImplEntry<'a> {
id: ImplId<'a>,
data: ImplData<'a>,

View File

@ -1,6 +1,7 @@
use crate::authorization::permissions::Permission;
use crate::authorization::roles::Roles;
use crate::resources::Resource;
use crate::users::db::User;
use crate::users::{db, UserRef};
use crate::Users;
use tracing::Span;
@ -16,25 +17,27 @@ impl SessionManager {
Self { users, roles }
}
pub fn try_open(&self, parent: &Span, uid: impl AsRef<str>) -> Option<SessionHandle> {
self.users
.get_user(uid.as_ref())
.map(|user| self.open(parent, user))
}
// TODO: make infallible
pub fn open(&self, parent: &Span, uid: impl AsRef<str>) -> Option<SessionHandle> {
let uid = uid.as_ref();
if let Some(user) = self.users.get_user(uid) {
pub fn open(&self, parent: &Span, user: User) -> SessionHandle {
let uid = user.id.as_str();
let span = tracing::info_span!(
target: "bffh::api",
parent: parent,
"session",
uid = uid,
uid,
);
tracing::trace!(parent: &span, uid, ?user, "opening session");
Some(SessionHandle {
SessionHandle {
span,
users: self.users.clone(),
roles: self.roles.clone(),
user: UserRef::new(user.id),
})
} else {
None
}
}
}

View File

@ -1,17 +1,19 @@
use std::fs::File;
use std::io;
use std::io::BufReader;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use crate::capnp::TlsListen;
use futures_rustls::TlsAcceptor;
use miette::IntoDiagnostic;
use miette::Diagnostic;
use rustls::version::{TLS12, TLS13};
use rustls::{Certificate, PrivateKey, ServerConfig, SupportedCipherSuite};
use thiserror::Error;
use tracing::Level;
use crate::keylog::KeyLogFile;
use crate::tls::Error::KeyLogOpen;
fn lookup_cipher_suite(name: &str) -> Option<SupportedCipherSuite> {
match name {
@ -47,8 +49,32 @@ pub struct TlsConfig {
keylog: Option<Arc<KeyLogFile>>,
}
#[derive(Debug, Error, Diagnostic)]
pub enum Error {
#[error("failed to open certificate file at path {0}")]
OpenCertFile(PathBuf, #[source] io::Error),
#[error("failed to open private key file at path {0}")]
OpenKeyFile(PathBuf, #[source] io::Error),
#[error("failed to read system certs")]
SystemCertsFile(#[source] io::Error),
#[error("failed to read from key file")]
ReadKeyFile(#[source] io::Error),
#[error("private key file must contain a single PEM-encoded private key")]
KeyFileFormat,
#[error("invalid TLS version {0}")]
TlsVersion(String),
#[error("Initializing TLS context failed")]
Builder(
#[from]
#[source]
rustls::Error,
),
#[error("failed to initialize key log")]
KeyLogOpen(#[source] io::Error),
}
impl TlsConfig {
pub fn new(keylogfile: Option<impl AsRef<Path>>, warn: bool) -> io::Result<Self> {
pub fn new(keylogfile: Option<impl AsRef<Path>>, warn: bool) -> Result<Self, Error> {
let span = tracing::span!(Level::INFO, "tls");
let _guard = span.enter();
@ -57,7 +83,11 @@ impl TlsConfig {
}
if let Some(path) = keylogfile {
let keylog = Some(KeyLogFile::new(path).map(|ok| Arc::new(ok))?);
let keylog = Some(
KeyLogFile::new(path)
.map(|ok| Arc::new(ok))
.map_err(KeyLogOpen)?,
);
Ok(Self { keylog })
} else {
Ok(Self { keylog: None })
@ -75,27 +105,31 @@ impl TlsConfig {
}
}
pub fn make_tls_acceptor(&self, config: &TlsListen) -> miette::Result<TlsAcceptor> {
pub fn make_tls_acceptor(&self, config: &TlsListen) -> Result<TlsAcceptor, Error> {
let span = tracing::debug_span!("tls");
let _guard = span.enter();
tracing::debug!(path = %config.certfile.as_path().display(), "reading certificates");
let mut certfp = BufReader::new(File::open(config.certfile.as_path()).into_diagnostic()?);
let path = config.certfile.as_path();
tracing::debug!(path = %path.display(), "reading certificates");
let mut certfp =
BufReader::new(File::open(path).map_err(|e| Error::OpenCertFile(path.into(), e))?);
let certs = rustls_pemfile::certs(&mut certfp)
.into_diagnostic()?
.map_err(Error::SystemCertsFile)?
.into_iter()
.map(Certificate)
.collect();
tracing::debug!(path = %config.keyfile.as_path().display(), "reading private key");
let mut keyfp = BufReader::new(File::open(config.keyfile.as_path()).into_diagnostic()?);
let key = match rustls_pemfile::read_one(&mut keyfp).into_diagnostic()? {
let path = config.keyfile.as_path();
tracing::debug!(path = %path.display(), "reading private key");
let mut keyfp =
BufReader::new(File::open(path).map_err(|err| Error::OpenKeyFile(path.into(), err))?);
let key = match rustls_pemfile::read_one(&mut keyfp).map_err(Error::ReadKeyFile)? {
Some(rustls_pemfile::Item::PKCS8Key(key) | rustls_pemfile::Item::RSAKey(key)) => {
PrivateKey(key)
}
_ => {
tracing::error!("private key file invalid");
miette::bail!("private key file must contain a PEM-encoded private key")
return Err(Error::KeyFileFormat);
}
};
@ -104,20 +138,19 @@ impl TlsConfig {
.with_safe_default_kx_groups();
let tls_builder = if let Some(ref min) = config.tls_min_version {
match min.as_str() {
let v = min.to_lowercase();
match v.as_str() {
"tls12" => tls_builder.with_protocol_versions(&[&TLS12]),
"tls13" => tls_builder.with_protocol_versions(&[&TLS13]),
x => miette::bail!("TLS version {} is invalid", x),
_ => return Err(Error::TlsVersion(v)),
}
} else {
tls_builder.with_safe_default_protocol_versions()
}
.into_diagnostic()?;
}?;
let mut tls_config = tls_builder
.with_no_client_auth()
.with_single_cert(certs, key)
.into_diagnostic()?;
.with_single_cert(certs, key)?;
if let Some(keylog) = &self.keylog {
tls_config.key_log = keylog.clone();

View File

@ -2,7 +2,6 @@ use lmdb::{DatabaseFlags, Environment, RwTransaction, Transaction, WriteFlags};
use rkyv::Infallible;
use std::collections::HashMap;
use miette::{Context, IntoDiagnostic};
use std::sync::Arc;
use crate::db;
@ -11,6 +10,8 @@ use rkyv::ser::serializers::AllocSerializer;
use rkyv::ser::Serializer;
use rkyv::Deserialize;
pub use crate::db::Error;
#[derive(
Clone,
PartialEq,
@ -34,11 +35,9 @@ fn hash_pw(pw: &[u8]) -> argon2::Result<String> {
}
impl User {
pub fn check_password(&self, pwd: &[u8]) -> miette::Result<bool> {
pub fn check_password(&self, pwd: &[u8]) -> Result<bool, argon2::Error> {
if let Some(ref encoded) = self.userdata.passwd {
argon2::verify_encoded(encoded, pwd)
.into_diagnostic()
.wrap_err("Stored password is an invalid string")
} else {
Ok(false)
}
@ -183,8 +182,8 @@ impl UserDB {
}
pub fn clear_txn(&self, txn: &mut RwTransaction) -> Result<(), db::Error> {
self.db.clear(txn);
Ok(())
// TODO: why was the result ignored here?
self.db.clear(txn)
}
pub fn get_all(&self) -> Result<HashMap<String, UserData>, db::Error> {

View File

@ -7,10 +7,10 @@ use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::io::Write;
use clap::ArgMatches;
use miette::{Context, Diagnostic, IntoDiagnostic, SourceOffset, SourceSpan};
use miette::{Diagnostic, IntoDiagnostic, SourceSpan};
use std::path::Path;
use std::sync::Arc;
use thiserror::Error;
pub mod db;
@ -69,17 +69,20 @@ pub struct Users {
userdb: &'static UserDB,
}
#[derive(Clone, Debug, PartialEq, Eq, Error, Diagnostic)]
#[error(transparent)]
#[repr(transparent)]
pub struct Error(#[from] pub db::Error);
impl Users {
pub fn new(env: Arc<Environment>) -> miette::Result<Self> {
pub fn new(env: Arc<Environment>) -> Result<Self, Error> {
let span = tracing::debug_span!("users", ?env, "Creating Users handle");
let _guard = span.enter();
let userdb = USERDB
.get_or_try_init(|| {
let userdb = USERDB.get_or_try_init(|| {
tracing::debug!("Global resource not yet initialized, initializing…");
unsafe { UserDB::create(env) }
})
.wrap_err("Failed to open userdb")?;
})?;
Ok(Self { userdb })
}
@ -170,6 +173,29 @@ impl Users {
Ok(())
}
pub fn load_map(&mut self, dump: &HashMap<String,UserData>) -> miette::Result<()> {
let mut txn = unsafe { self.userdb.get_rw_txn() }?;
self.userdb.clear_txn(&mut txn)?;
for (uid, data) in dump {
let user = db::User {
id: uid.clone(),
userdata: data.clone(),
};
tracing::trace!(%uid, ?user, "Storing user object");
if let Err(e) = self.userdb.put_txn(&mut txn, uid.as_str(), &user) {
tracing::warn!(error=?e, "failed to add user")
}
}
txn.commit().map_err(crate::db::Error::from)?;
Ok(())
}
pub fn dump_map(&self) -> miette::Result<HashMap<String, UserData>> {
return Ok(self.userdb.get_all()?)
}
pub fn dump_file(&self, path_str: &str, force: bool) -> miette::Result<usize> {
let path = Path::new(path_str);
let exists = path.exists();
@ -200,7 +226,7 @@ impl Users {
}
let mut file = fs::File::create(path).into_diagnostic()?;
let users = self.userdb.get_all()?;
let users = self.dump_map()?;
let encoded = toml::ser::to_vec(&users).into_diagnostic()?;
file.write_all(&encoded[..]).into_diagnostic()?;

View File

@ -1,5 +1,5 @@
use clap::{Arg, Command, ValueHint};
use diflouroborane::{config, Diflouroborane};
use difluoroborane::{config, Difluoroborane};
use std::str::FromStr;
use std::{env, io, io::Write, path::PathBuf};
@ -15,12 +15,12 @@ fn main() -> miette::Result<()> {
FabAccess {apiver}\n\
\t[{build_kind} build built on {build_time}]\n\
\t {rustc_version}\n\t {cargo_version}",
version=diflouroborane::env::PKG_VERSION,
version=difluoroborane::env::PKG_VERSION,
apiver="0.3",
rustc_version=diflouroborane::env::RUST_VERSION,
cargo_version=diflouroborane::env::CARGO_VERSION,
build_time=diflouroborane::env::BUILD_TIME_3339,
build_kind=diflouroborane::env::BUILD_RUST_CHANNEL))
rustc_version=difluoroborane::env::RUST_VERSION,
cargo_version=difluoroborane::env::CARGO_VERSION,
build_time=difluoroborane::env::BUILD_TIME_3339,
build_kind=difluoroborane::env::BUILD_RUST_CHANNEL))
.about(clap::crate_description!())
.arg(Arg::new("config")
.help("Path to the config file to use")
@ -57,10 +57,18 @@ fn main() -> miette::Result<()> {
.help("Check config for validity")
.long("check"))
.arg(
Arg::new("dump")
Arg::new("dump-db")
.help("Dump all internal databases")
.long("dump")
.conflicts_with("load"))
.long("dump-db")
.alias("dump")
.conflicts_with("dump-users")
.conflicts_with("load-users")
.conflicts_with("load-db")
.takes_value(true)
.value_name("FILE")
.value_hint(ValueHint::AnyPath)
.default_missing_value("bffh-db.toml")
)
.arg(
Arg::new("dump-users")
.help("Dump the users db to the given file as TOML")
@ -69,18 +77,33 @@ fn main() -> miette::Result<()> {
.value_name("FILE")
.value_hint(ValueHint::AnyPath)
.default_missing_value("users.toml")
.conflicts_with("load"))
.conflicts_with("load-users")
.conflicts_with("load-db")
.conflicts_with("dump-db")
)
.arg(
Arg::new("force")
.help("force ops that may clobber")
.long("force")
)
.arg(
Arg::new("load")
.help("Load values into the internal databases")
.long("load")
Arg::new("load-users")
.help("Load users into the internal databases")
.long("load-users")
.alias("load")
.takes_value(true)
.conflicts_with("dump"))
.conflicts_with("dump-db")
.conflicts_with("load-db")
.conflicts_with("dump-users")
)
.arg(
Arg::new("load-db")
.help("Load values into the internal databases")
.long("load-db")
.takes_value(true)
.conflicts_with("dump-db")
.conflicts_with("load-users")
.conflicts_with("dump-users"))
.arg(Arg::new("keylog")
.help("log TLS keys into PATH. If no path is specified the value of the envvar SSLKEYLOGFILE is used.")
.long("tls-key-log")
@ -98,7 +121,7 @@ fn main() -> miette::Result<()> {
let configpath = matches
.value_of("config")
.unwrap_or("/etc/diflouroborane.dhall");
.unwrap_or("/etc/difluoroborane.dhall");
// Check for the --print-default option first because we don't need to do anything else in that
// case.
@ -137,10 +160,18 @@ fn main() -> miette::Result<()> {
let mut config = config::read(&PathBuf::from_str(configpath).unwrap())?;
if matches.is_present("dump") {
return Err(miette::miette!("DB Dumping is currently not implemented, except for the users db, using `--dump-users`"));
if matches.is_present("dump-db") {
let mut bffh = Difluoroborane::new(config)?;
let fname = matches.value_of("dump-db").unwrap();
bffh.dump_db(fname)?;
return Ok(());
} else if matches.is_present("load-db") {
let mut bffh = Difluoroborane::new(config)?;
let fname = matches.value_of("load-db").unwrap();
bffh.load_db(fname)?;
return Ok(());
} else if matches.is_present("dump-users") {
let bffh = Diflouroborane::new(config)?;
let bffh = Difluoroborane::new(config)?;
let number = bffh.users.dump_file(
matches.value_of("dump-users").unwrap(),
@ -150,12 +181,12 @@ fn main() -> miette::Result<()> {
tracing::info!("successfully dumped {} users", number);
return Ok(());
} else if matches.is_present("load") {
let bffh = Diflouroborane::new(config)?;
} else if matches.is_present("load-users") {
let bffh = Difluoroborane::new(config)?;
bffh.users.load_file(matches.value_of("load").unwrap())?;
bffh.users.load_file(matches.value_of("load-users").unwrap())?;
tracing::info!("loaded users from {}", matches.value_of("load").unwrap());
tracing::info!("loaded users from {}", matches.value_of("load-users").unwrap());
return Ok(());
} else {
@ -179,7 +210,7 @@ fn main() -> miette::Result<()> {
}
config.logging.format = matches.value_of("log format").unwrap_or("full").to_string();
let mut bffh = Diflouroborane::new(config)?;
let mut bffh = Difluoroborane::new(config)?;
bffh.run()?;
}

View File

@ -1,4 +1,4 @@
fn main() {
// Extract build-time information using the `shadow-rs` crate
shadow_rs::new();
shadow_rs::new().unwrap();
}

8
cargo-cross-config Normal file
View File

@ -0,0 +1,8 @@
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
[target.arm-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabi-gcc"
[target.aarch64-unknown-linux-gnu]
linker = "aarch64-linux-gnu-gcc"

View File

@ -229,6 +229,9 @@
-- Linking up machines to initiators. Similar to actors a machine can have several initiators assigned but an
-- initiator can only be assigned to one machine.
-- The below is once again how you have to define *no* initiators.
init_connections = [] : List { machine : Text, initiator : Text }
init_connections = [] : List { machine : Text, initiator : Text },
--init_connections = [{ machine = "Testmachine", initiator = "Initiator" }]
instanceurl = "https://example.com",
spacename = "examplespace"
}

View File

@ -2,7 +2,7 @@ version: "3.8"
services:
bffh-a:
image: registry.gitlab.com/fabinfra/fabaccess/bffh:dev-latest
command: ["sh", "-c", "diflouroborane -c /etc/bffh/bffh.dhall --load=/etc/bffh; diflouroborane -c /etc/bffh/bffh.dhall"]
command: ["sh", "-c", "difluoroborane -c /etc/bffh/bffh.dhall --load=/etc/bffh; difluoroborane -c /etc/bffh/bffh.dhall"]
volumes:
# generate a sample config.toml by running "docker run registry.gitlab.com/fabinfra/fabaccess/bffh:dev-latest --print-default > examples/config.toml" from the project root. You may have to delete the ipv6 listen section.
- "./config_a:/etc/bffh"
@ -12,7 +12,7 @@ services:
image: eclipse-mosquitto
bffh-b:
image: registry.gitlab.com/fabinfra/fabaccess/bffh:dev-latest
command: ["sh", "-c", "diflouroborane -c /etc/bffh/bffh.dhall --load=/etc/bffh; diflouroborane -c /etc/bffh/bffh.dhall"]
command: ["sh", "-c", "difluoroborane -c /etc/bffh/bffh.dhall --load=/etc/bffh; difluoroborane -c /etc/bffh/bffh.dhall"]
volumes:
# generate a sample config.toml by running "docker run registry.gitlab.com/fabinfra/fabaccess/bffh:dev-latest --print-default > examples/config.toml" from the project root. You may have to delete the ipv6 listen section.
- "./config_b:/etc/bffh"

View File

@ -8,4 +8,4 @@ edition = "2021"
[dependencies]
sdk-proc = { path = "sdk_proc" }
futures-util = "0.3"
diflouroborane = { path = "../.." }
difluoroborane = { path = "../.." }

View File

@ -30,7 +30,8 @@ where
}
let schedule = |t| (QUEUE.deref()).send(t).unwrap();
let (proc, handle) = LightProc::recoverable(future, schedule);
let span = tracing::trace_span!("runtime.spawn", kind = "local");
let (proc, handle) = LightProc::recoverable(future, schedule, span, None);
let handle = handle.on_panic(
|err: Box<dyn Any + Send>| match err.downcast::<&'static str>() {

View File

@ -17,7 +17,8 @@ where
let future = async move { fut.await };
let schedule = move |t| sender.send(t).unwrap();
let (proc, handle) = LightProc::build(future, schedule);
let span = tracing::trace_span!("runtime.spawn", kind = "local");
let (proc, handle) = LightProc::build(future, schedule, span, None);
proc.schedule();

View File

@ -9,6 +9,7 @@
//! # Example Usage
//!
//! ```rust
//! use tracing::Span;
//! use lightproc::prelude::*;
//!
//! // ... future that does work
@ -23,6 +24,8 @@
//! let panic_recoverable = LightProc::recoverable(
//! future,
//! schedule_function,
//! Span::current(),
//! None,
//! );
//! ```
@ -60,6 +63,7 @@ impl LightProc {
/// # Example
/// ```rust
/// # use std::any::Any;
/// # use tracing::Span;
/// # use lightproc::prelude::*;
/// #
/// # // ... basic schedule function with no waker logic
@ -72,9 +76,11 @@ impl LightProc {
/// let (proc, handle) = LightProc::recoverable(
/// future,
/// schedule_function,
/// Span::current(),
/// None
/// );
/// let handle = handle.on_panic(|s: &mut EmptyProcState, e: Box<dyn Any + Send>| {
/// let reason = e.downcast::<String>();
/// let handle = handle.on_panic(|e: Box<dyn Any + Send>| {
/// let reason = e.downcast::<String>().unwrap();
/// println!("future panicked!: {}", &reason);
/// });
/// ```
@ -110,13 +116,6 @@ impl LightProc {
/// # // ... basic schedule function with no waker logic
/// # fn schedule_function(proc: LightProc) {;}
/// #
/// # // ... process stack with a lifecycle callback
/// # let proc_stack =
/// # ProcStack::default()
/// # .with_after_panic(|s: &mut EmptyProcState| {
/// # println!("After panic started!");
/// # });
/// #
/// // ... creating a standard process
/// let standard = LightProc::build(
/// future,

View File

@ -395,7 +395,7 @@ where
unsafe fn tick(ptr: *const ()) {
let mut raw = Self::from_ptr(ptr);
// Enter the span associated with the process to track execution time if enabled.
let _guard = (&(*raw.pdata).span).enter();
let guard = (&(*raw.pdata).span).enter();
// Create a context from the raw proc pointer and the vtable inside its pdata.
let waker = ManuallyDrop::new(Waker::from_raw(RawWaker::new(
@ -487,6 +487,8 @@ where
(*raw.pdata).notify();
}
// the tracing guard is inside the proc, so it must be dropped first
drop(guard);
// Drop the proc reference.
Self::decrement(ptr);
break;

View File

@ -49,8 +49,7 @@ impl<R> RecoverableHandle<R> {
///
/// ```rust
/// # use std::any::Any;
/// use lightproc::proc_stack::ProcStack;
/// use lightproc::proc_state::EmptyProcState;
/// # use tracing::Span;
/// # use lightproc::prelude::*;
/// #
/// # // ... future that does work
@ -61,21 +60,16 @@ impl<R> RecoverableHandle<R> {
/// # // ... basic schedule function with no waker logic
/// # fn schedule_function(proc: LightProc) {;}
/// #
/// # // ... process stack with a lifecycle callback
/// # let proc_stack =
/// # ProcStack::default()
/// # .with_after_panic(|s: &mut EmptyProcState| {
/// # println!("After panic started!");
/// # });
/// #
/// // ... creating a recoverable process
/// let (proc, recoverable) = LightProc::recoverable(
/// future,
/// schedule_function,
/// Span::current(),
/// None
/// );
///
/// recoverable
/// .on_return(|_e: Box<dyn Any + Send>| {
/// .on_panic(|_e: Box<dyn Any + Send>| {
/// println!("Inner future panicked");
/// });
/// ```

2
rust-toolchain.toml Normal file
View File

@ -0,0 +1,2 @@
[toolchain]
channel = "1.66"