Commit: Restructure
Mirror of https://gitlab.com/fabinfra/fabaccess/bffh.git (synced 2025-06-11 19:03:21 +02:00)
bffhd/config.rs (new file, 191 lines)
@@ -0,0 +1,191 @@
use std::default::Default;
use std::path::{Path, PathBuf};
use std::collections::HashMap;

use serde::{Serialize, Deserialize, Deserializer, Serializer};

use std::fmt::Formatter;
use std::net::{SocketAddr, IpAddr, ToSocketAddrs};
use std::str::FromStr;
use crate::permissions::{PermRule, RoleIdentifier};
use serde::de::Error;

type Result<T> = std::result::Result<T, serde_dhall::Error>;

pub fn read(path: &Path) -> Result<Config> {
    serde_dhall::from_file(path)
        .parse()
        .map_err(Into::into)
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// A list of address/port pairs to listen on.
    // TODO: This should really be a variant type; that is something that can figure out itself if
    // it contains enough information to open a socket (i.e. it checks if it's a valid path (=>
    // Unix socket) or IPv4/v6 address)
    pub listens: Vec<Listen>,

    /// Machine descriptions to load
    //pub machines: HashMap<MachineIdentifier, MachineDescription>,

    /// Actors to load and their configuration options
    pub actors: HashMap<String, ModuleConfig>,

    /// Initiators to load and their configuration options
    pub initiators: HashMap<String, ModuleConfig>,

    pub mqtt_url: String,

    pub actor_connections: Vec<(String, String)>,
    pub init_connections: Vec<(String, String)>,

    pub db_path: PathBuf,

    pub roles: HashMap<RoleIdentifier, RoleConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoleConfig {
    #[serde(default = "Vec::new")]
    pub parents: Vec<RoleIdentifier>,
    #[serde(default = "Vec::new")]
    pub permissions: Vec<PermRule>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleConfig {
    pub module: String,
    pub params: HashMap<String, String>
}

#[derive(Debug, Clone)]
pub struct Listen {
    address: String,
    port: Option<u16>,
}

impl std::fmt::Display for Listen {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", &self.address, self.port.unwrap_or(DEFAULT_PORT))
    }
}

impl ToSocketAddrs for Listen {
    type Iter = <(String, u16) as ToSocketAddrs>::Iter;

    fn to_socket_addrs(&self) -> std::io::Result<Self::Iter> {
        if let Some(port) = self.port {
            (self.address.as_str(), port).to_socket_addrs()
        } else {
            (self.address.as_str(), DEFAULT_PORT).to_socket_addrs()
        }
    }
}

impl<'de> serde::Deserialize<'de> for Listen {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where D: Deserializer<'de>
    {
        deserializer.deserialize_str(ListenVisitor)
    }
}
impl serde::Serialize for Listen {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where S: Serializer
    {
        if let Some(port) = self.port {
            serializer.serialize_str(&format!("{}:{}", self.address, port))
        } else {
            serializer.serialize_str(&self.address)
        }
    }
}

struct ListenVisitor;
impl<'de> serde::de::Visitor<'de> for ListenVisitor {
    type Value = Listen;

    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
        write!(formatter, "A string encoding a valid IP or Hostname (e.g. 127.0.0.1 or [::1]) with \
            or without a defined port")
    }

    fn visit_str<E>(self, v: &str) -> std::result::Result<Self::Value, E>
        where E: Error
    {
        let sockaddr = SocketAddr::from_str(v);
        if let Ok(address) = sockaddr {
            return Ok(Listen {
                address: address.ip().to_string(),
                port: Some(address.port()),
            })
        }

        let ipaddr = IpAddr::from_str(v);
        if let Ok(address) = ipaddr {
            return Ok(Listen {
                address: address.to_string(),
                port: None,
            })
        }

        let mut split = v.split(':');
        let address = split.next()
            .expect("str::split should always return at least one element")
            .to_string();
        let port = if let Some(port) = split.next() {
            let port: u16 = port.parse()
                .map_err(|_| {
                    E::custom(&format!("Expected valid ip address or hostname with or without \
                        port. Failed to parse \"{}\".", v))
                })?;

            Some(port)
        } else {
            None
        };

        Ok(Listen { address, port })
    }
}

impl Default for Config {
    fn default() -> Self {
        let mut actors: HashMap::<String, ModuleConfig> = HashMap::new();
        let mut initiators: HashMap::<String, ModuleConfig> = HashMap::new();

        actors.insert("Actor".to_string(), ModuleConfig {
            module: "Shelly".to_string(),
            params: HashMap::new(),
        });
        initiators.insert("Initiator".to_string(), ModuleConfig {
            module: "TCP-Listen".to_string(),
            params: HashMap::new(),
        });

        Config {
            listens: vec![
                Listen {
                    address: "127.0.0.1".to_string(),
                    port: None,
                }
            ],
            actors,
            initiators,
            mqtt_url: "tcp://localhost:1883".to_string(),
            actor_connections: vec![
                ("Testmachine".to_string(), "Actor".to_string()),
            ],
            init_connections: vec![
                ("Initiator".to_string(), "Testmachine".to_string()),
            ],

            db_path: PathBuf::from("/run/bffh/database"),
            roles: HashMap::new(),
        }
    }
}

// The default port in the non-assignable i.e. free-use area
pub const DEFAULT_PORT: u16 = 59661;
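A brief usage sketch (illustrative only, not part of this commit): loading the Dhall configuration through the `read` function above and falling back to the `Default` impl. The config path and the helper name are assumptions.

    use std::path::Path;
    use crate::config::{self, Config};

    // Hypothetical helper; the commit itself does not fix a config location.
    fn load_or_default() -> Config {
        let path = Path::new("/etc/bffh/bffhd.dhall");
        config::read(path).unwrap_or_else(|err| {
            eprintln!("failed to read config ({}), falling back to defaults", err);
            Config::default()
        })
    }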
bffhd/db.rs (new file, 234 lines)
@@ -0,0 +1,234 @@
use std::{
    marker::PhantomData,
};

pub use lmdb::{
    Environment,

    DatabaseFlags,
    WriteFlags,
    EnvironmentFlags,

    Transaction,
    RoTransaction,
    RwTransaction,
};

use rkyv::{Fallible, Serialize, ser::serializers::AllocSerializer, AlignedVec, Archived};

mod raw;
use raw::RawDB;

mod typed;
// re-exports
pub use typed::{
    DB,
    TypedCursor,

    Adapter,
    OutputBuffer,
    OutputWriter,
};

mod hash;
pub use hash::{
    HashDB,
    Entry,
};

mod fix;
pub use fix::LMDBorrow;

pub mod state;
pub use state::{
    StateDB,
};

mod resources;
pub use resources::{
    ResourceDB,
};

mod pass;
pub use pass::{
    PassDB,
};

mod user;
pub use user::{
    UserDB,
};

use lmdb::Error;
use rkyv::Deserialize;
use rkyv::ser::serializers::AlignedSerializer;
use std::sync::Arc;
use std::path::Path;
use crate::db::user::User;
use std::collections::HashMap;
use crate::state::{OwnedEntry, State};
use std::iter::FromIterator;
use std::ops::Deref;
use crate::oid::{ArchivedObjectIdentifier, ObjectIdentifier};
use crate::state::value::SerializeValue;

#[derive(Debug)]
pub enum DBError {
    LMDB(lmdb::Error),
    RKYV(<AllocSerializer<1024> as Fallible>::Error),
}

pub(crate) type Result<T> = std::result::Result<T, DBError>;

impl From<lmdb::Error> for DBError {
    fn from(e: lmdb::Error) -> Self {
        Self::LMDB(e)
    }
}

type Ser = AllocSerializer<1024>;
#[derive(Clone)]
struct AllocAdapter<V> {
    phantom: PhantomData<V>,
}

impl<V> Fallible for AllocAdapter<V> {
    type Error = DBError;
}

impl<V: Serialize<Ser>> Adapter for AllocAdapter<V> {
    type Serializer = Ser;
    type Value = V;

    fn new_serializer() -> Self::Serializer {
        Self::Serializer::default()
    }

    fn from_ser_err(e: <Self::Serializer as Fallible>::Error) -> Self::Error {
        DBError::RKYV(e)
    }
    fn from_db_err(e: lmdb::Error) -> Self::Error {
        e.into()
    }
}

#[derive(Copy, Clone)]
pub struct AlignedAdapter<V> {
    phantom: PhantomData<V>,
}
impl<V> Fallible for AlignedAdapter<V> {
    type Error = lmdb::Error;
}
impl<V: Serialize<AlignedSerializer<AlignedVec>>> Adapter for AlignedAdapter<V> {
    type Serializer = AlignedSerializer<AlignedVec>;
    type Value = V;

    fn new_serializer() -> Self::Serializer {
        Self::Serializer::default()
    }

    fn from_ser_err(_: <Self::Serializer as Fallible>::Error) -> <Self as Fallible>::Error {
        unreachable!()
    }

    fn from_db_err(e: Error) -> <Self as Fallible>::Error {
        e
    }
}

pub struct Databases {
    pub userdb: UserDB,
    pub passdb: PassDB,
    pub resourcedb: ResourceDB,
    pub statedb: StateDB,
}

impl Databases {
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let env = Arc::new(Environment::new()
            .open(&Path::join(path.as_ref(), "internal"))?
        );
        let userdb = unsafe { UserDB::open(env.clone())? };
        let passdb = unsafe { PassDB::open(env.clone())? };
        let resourcedb = unsafe { ResourceDB::open(env)? };

        let statedb = StateDB::open(&Path::join(path.as_ref(), "state"))?;

        Ok(Self { userdb, passdb, resourcedb, statedb })
    }

    pub fn create<P: AsRef<Path>>(path: P) -> Result<Self> {
        let env = Arc::new(Environment::new()
            .set_max_dbs(16)
            .open(path.as_ref())?
        );
        let userdb = unsafe { UserDB::create(env.clone())? };
        let passdb = unsafe { PassDB::create(env.clone())? };
        let resourcedb = unsafe { ResourceDB::create(env)? };

        let statedb = StateDB::create(&Path::join(path.as_ref(), "state"))?;

        Ok(Self { userdb, passdb, resourcedb, statedb })
    }
}

#[derive(Debug, serde::Serialize)]
pub struct Dump {
    users: HashMap<String, User>,
    passwds: HashMap<String, String>,
    states: HashMap<String, (State, State)>,
}

impl Dump {
    pub fn new(dbs: &Databases) -> Result<Self> {
        let users = HashMap::from_iter(dbs.userdb.get_all()?.into_iter());
        let passwds = HashMap::from_iter(dbs.passdb.get_all()?.into_iter());
        let mut states = HashMap::new();
        for (name, id) in dbs.resourcedb.get_all()?.into_iter() {
            let input = dbs.statedb.get_input(id)?.map(|input| {
                let input: &Archived<State> = input.deref();
                let hash: u64 = input.hash;
                let inner = input.inner.iter()
                    .map(|entry| {

                        let oid: &ArchivedObjectIdentifier = &entry.oid;
                        let bytes: &[u8] = oid.deref();
                        let mut vec = Vec::with_capacity(bytes.len());
                        vec.copy_from_slice(bytes);
                        let oid = ObjectIdentifier::new_unchecked(vec.into_boxed_slice());

                        let val: Box<dyn SerializeValue> = entry.val
                            .deserialize(&mut rkyv::Infallible).unwrap();

                        OwnedEntry { oid, val }
                    }).collect();
                State { hash, inner }
            }).unwrap_or(State::build().finish());

            let output = dbs.statedb.get_output(id)?.map(|output| {
                let output: &Archived<State> = output.deref();
                let hash: u64 = output.hash;
                let inner = output.inner.iter().map(|entry| {

                    let oid: &ArchivedObjectIdentifier = &entry.oid;
                    let bytes: &[u8] = oid.deref();
                    let mut vec = Vec::with_capacity(bytes.len());
                    vec.copy_from_slice(bytes);
                    let oid = ObjectIdentifier::new_unchecked(vec.into_boxed_slice());

                    let val: Box<dyn SerializeValue> = entry.val
                        .deserialize(&mut rkyv::Infallible).unwrap();

                    OwnedEntry { oid, val }
                }).collect();

                State { hash, inner }
            }).unwrap_or(State::build().finish());

            let old = states.insert(name, (input, output));
            assert!(old.is_none());
        }

        Ok(Self { users, passwds, states })
    }
}
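A usage sketch (illustrative only, not part of this commit): creating the database handles and producing a `Dump`, using the default `db_path` from bffhd/config.rs; the helper name and the plain Debug print are assumptions.

    use crate::db::{Databases, Dump, DBError};

    // Hypothetical helper, e.g. behind a `--dump` CLI flag.
    fn dump_databases() -> Result<(), DBError> {
        let dbs = Databases::open("/run/bffh/database")?;
        let dump = Dump::new(&dbs)?;
        // Dump derives Debug and serde::Serialize, so it could also be fed to any serde format.
        println!("{:#?}", dump);
        Ok(())
    }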
bffhd/db/fix.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use std::{
    ptr::NonNull,
    ops::Deref,
};

use crate::db::Transaction;
use std::fmt::{Debug, Formatter};

/// Memory Fixpoint for a value in the DB
///
/// LMDB binds lifetimes of buffers to the transaction that returned the buffer. As long as this
/// transaction is not `commit()`ed, `abort()`ed or `reset()`ed the pages containing these values
/// are not returned into circulation.
/// This struct encodes this by binding a live reference to the Transaction to the returned
/// and interpreted buffer. The placeholder `T` is the container for the transaction. This may be a
/// plain `RoTransaction<'env>`, a `Rc<RoTxn>` (meaning Fix is !Send) or an `Arc<RoTxn>`, depending
/// on your needs.
pub struct LMDBorrow<T, V> {
    ptr: NonNull<V>,
    txn: T,
}

impl<'env, T, V> LMDBorrow<T, V>
    where T: Transaction,
{
    pub unsafe fn new(ptr: NonNull<V>, txn: T) -> Self {
        Self { ptr: ptr.into(), txn }
    }

    pub fn unwrap_txn(self) -> T {
        self.txn
    }
}

impl<'env, T, V> Deref for LMDBorrow<T, V>
{
    type Target = V;

    fn deref(&self) -> &Self::Target {
        // As long as the transaction is kept alive (which it is, because it's in self) state is a
        // valid pointer so this is safe.
        unsafe { self.ptr.as_ref() }
    }
}

impl<'env, T, V: Debug> Debug for LMDBorrow<T, V> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self.deref())
    }
}

impl<'env, T, V: serde::Serialize> serde::Serialize for LMDBorrow<T, V> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where S: serde::Serializer
    {
        self.deref().serialize(serializer)
    }
}
bffhd/db/hash.rs (new file, 164 lines)
@@ -0,0 +1,164 @@
use std::{
    marker::PhantomData,
    hash::{
        Hash,
        Hasher,
        BuildHasher,
    },
    collections::hash_map::RandomState,
};

use rkyv::{
    Archive,
    Archived,
    Serialize,
    Deserialize,
    Fallible,
};

use super::{
    DB,
    Adapter,
    OutputBuffer,

    Environment,

    DatabaseFlags,
    WriteFlags,

    Transaction,
    RwTransaction,
};


#[derive(Archive, Serialize, Deserialize)]
/// The entry as it is stored inside the database.
pub struct Entry<K: Archive, V: Archive> {
    pub key: K,
    pub val: V,
}

#[derive(Clone, Copy)]
pub struct HashAdapter<K, A> {
    k: PhantomData<K>,
    a: PhantomData<A>,
}
impl<K, A> HashAdapter<K, A> {
    pub fn new() -> Self {
        Self { k: PhantomData, a: PhantomData }
    }
}

impl<K, A: Fallible> Fallible for HashAdapter<K, A> { type Error = <A as Fallible>::Error; }
impl<K, A: Adapter> Adapter for HashAdapter<K, A>
    where K: Archive,
          Entry<K, A::Value>: Serialize<A::Serializer>,
{
    type Serializer = A::Serializer;
    type Value = Entry<K, A::Value>;

    fn new_serializer() -> Self::Serializer
    { A::new_serializer() }

    fn from_ser_err(e: <Self::Serializer as Fallible>::Error) -> <A as Fallible>::Error
    { A::from_ser_err(e) }

    fn from_db_err(e: lmdb::Error) -> <A as Fallible>::Error
    { A::from_db_err(e) }
}


const DEFAULT_HASH_FLAGS: libc::c_uint =
    DatabaseFlags::INTEGER_KEY.bits() + DatabaseFlags::DUP_SORT.bits();

pub struct HashDB<A, K, H = RandomState>
{
    db: DB<HashAdapter<K, A>>,
    hash_builder: H,
}

impl<A, K> HashDB<A, K>
{
    pub unsafe fn create(env: &Environment, name: Option<&str>) -> lmdb::Result<Self> {
        Self::create_with_hasher(env, name, RandomState::new())
    }
    pub unsafe fn open(env: &Environment, name: Option<&str>) -> lmdb::Result<Self> {
        Self::open_with_hasher(env, name, RandomState::new())
    }
}

impl<A, K, H: BuildHasher> HashDB<A, K, H>
{
    fn new(db: DB<HashAdapter<K, A>>, hash_builder: H) -> Self {
        Self { db, hash_builder }
    }

    pub unsafe fn create_with_hasher(env: &Environment, name: Option<&str>, hash_builder: H)
        -> lmdb::Result<Self>
    {
        let flags = DatabaseFlags::from_bits(DEFAULT_HASH_FLAGS).unwrap();
        DB::create(env, name, flags).map(|db| Self::new(db, hash_builder))
    }
    pub unsafe fn open_with_hasher(env: &Environment, name: Option<&str>, hash_builder: H)
        -> lmdb::Result<Self>
    {
        DB::open(env, name).map(|db| Self::new(db, hash_builder))
    }

}

impl<A, K, H> HashDB<A, K, H>
    where A: Adapter,
          HashAdapter<K, A>: Adapter<Value=Entry<K, A::Value>>,
          H: BuildHasher,
          K: Hash + Archive,
          K::Archived: PartialEq<K>,
{
    /// Retrieve an entry from the hashdb
    ///
    /// The result is a view pinned to the lifetime of the transaction. You can get owned Values
    /// using [`Deserialize`].
    pub fn get<'txn, T: Transaction>(&self, txn: &'txn T, key: &K)
        -> Result<
            Option<&'txn Archived<<HashAdapter<K, A> as Adapter>::Value>>,
            <HashAdapter<K, A> as Fallible>::Error
        >
    {
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        let hash = hasher.finish();

        let mut cursor = self.db.open_ro_cursor(txn)?;
        let i = cursor
            .iter_dup_of(&hash.to_ne_bytes()).filter_map(|r| r.ok())
            .map(|(_keybuf, entry)| entry);
        for entry in i {
            let entry: &Archived<Entry<K, A::Value>> = entry;
            if entry.key == *key {
                return Ok(Some(entry));
            }
        }

        Ok(None)
    }
}

impl<'a, A, K, H> HashDB<A, K, H>
    where A: Adapter,
          A::Serializer: OutputBuffer,
          H: BuildHasher,
          K: Hash + Serialize<A::Serializer>,
          K::Archived: PartialEq<K>,
{
    pub fn insert_entry(&self, txn: &mut RwTransaction, entry: &Entry<K, A::Value>)
        -> Result<(), A::Error>
    {
        let mut hasher = self.hash_builder.build_hasher();
        entry.key.hash(&mut hasher);
        let hash = hasher.finish();

        self.db.put(txn, &hash.to_ne_bytes(), entry, WriteFlags::empty())?;

        Ok(())
    }
}
bffhd/db/pass.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use std::sync::Arc;
use super::Environment;
use super::AllocAdapter;
use super::DB;
use super::raw::RawDB;
use super::{DatabaseFlags, WriteFlags};
use crate::db::Result;
use super::Transaction;

use argon2;

type Adapter = AllocAdapter<String>;
#[derive(Clone)]
pub struct PassDB {
    env: Arc<Environment>,
    db: DB<Adapter>,
}

impl PassDB {
    pub unsafe fn new(env: Arc<Environment>, db: RawDB) -> Self {
        let db = DB::new_unchecked(db);
        Self { env, db }
    }

    pub unsafe fn open(env: Arc<Environment>) -> Result<Self> {
        let db = RawDB::open(&env, Some("pass"))?;
        Ok(Self::new(env, db))
    }

    pub unsafe fn create(env: Arc<Environment>) -> Result<Self> {
        let flags = DatabaseFlags::empty();
        let db = RawDB::create(&env, Some("pass"), flags)?;
        Ok(Self::new(env, db))
    }

    pub fn check_pw<P: AsRef<[u8]>>(&self, uid: &str, inpass: P) -> Result<Option<bool>> {
        let txn = self.env.begin_ro_txn()?;
        if let Some(pass) = self.db.get(&txn, &uid.as_bytes())? {
            Ok(argon2::verify_encoded(pass.as_str(), inpass.as_ref())
                .ok())
        } else {
            Ok(None)
        }
    }

    pub fn set_password<P: AsRef<[u8]>>(&self, uid: &str, password: P) -> Result<()> {
        let cfg = argon2::Config::default();
        let salt: [u8; 10] = rand::random();
        let enc = argon2::hash_encoded(password.as_ref(), &salt, &cfg)
            .expect("Hashing password failed for static valid config");

        let flags = WriteFlags::empty();
        let mut txn = self.env.begin_rw_txn()?;
        self.db.put(&mut txn, &uid.as_bytes(), &enc, flags)?;
        txn.commit()?;
        Ok(())
    }

    pub fn get_all(&self) -> Result<Vec<(String, String)>> {
        let txn = self.env.begin_ro_txn()?;
        let mut cursor = self.db.open_ro_cursor(&txn)?;
        let iter = cursor.iter_start();
        let mut out = Vec::new();
        for pass in iter {
            let (uid, pass) = pass?;
            let uid = unsafe { std::str::from_utf8_unchecked(uid).to_string() };
            let pass = pass.as_str().to_string();
            out.push((uid, pass));
        }

        Ok(out)
    }
}
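A usage sketch (illustrative only, not part of this commit): the password round-trip offered by `PassDB` above. Note that `check_pw` answers `Ok(None)` both for an unknown uid and for a stored hash that could not be verified.

    use crate::db::{PassDB, DBError};

    // Hypothetical helper; uid and password are placeholders.
    fn pass_roundtrip(passdb: &PassDB) -> Result<(), DBError> {
        passdb.set_password("testuser", "correct horse battery staple")?;
        match passdb.check_pw("testuser", "correct horse battery staple")? {
            Some(true) => println!("password accepted"),
            Some(false) => println!("password rejected"),
            None => println!("unknown user or unverifiable hash"),
        }
        Ok(())
    }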
bffhd/db/raw.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use lmdb::{
    Transaction,
    RwTransaction,
    Environment,
    DatabaseFlags,
    WriteFlags,
};

#[derive(Debug, Clone)]
pub struct RawDB {
    db: lmdb::Database,
}

impl RawDB {
    pub fn open(env: &Environment, name: Option<&str>) -> lmdb::Result<Self> {
        env.open_db(name).map(|db| Self { db })
    }

    pub fn create(env: &Environment, name: Option<&str>, flags: DatabaseFlags) -> lmdb::Result<Self> {
        env.create_db(name, flags).map(|db| Self { db })
    }

    pub fn get<'txn, T: Transaction, K>(&self, txn: &'txn T, key: &K) -> lmdb::Result<Option<&'txn [u8]>>
        where K: AsRef<[u8]>
    {
        match txn.get(self.db, key) {
            Ok(buf) => Ok(Some(buf)),
            Err(lmdb::Error::NotFound) => Ok(None),
            Err(e) => Err(e),
        }
    }

    pub fn put<K, V>(&self, txn: &mut RwTransaction, key: &K, value: &V, flags: WriteFlags)
        -> lmdb::Result<()>
        where K: AsRef<[u8]>,
              V: AsRef<[u8]>,
    {
        txn.put(self.db, key, value, flags)
    }

    pub fn reserve<'txn, K>(&self, txn: &'txn mut RwTransaction, key: &K, size: usize, flags: WriteFlags)
        -> lmdb::Result<&'txn mut [u8]>
        where K: AsRef<[u8]>
    {
        txn.reserve(self.db, key, size, flags)
    }

    pub fn del<K, V>(&self, txn: &mut RwTransaction, key: &K, value: Option<&V>) -> lmdb::Result<()>
        where K: AsRef<[u8]>,
              V: AsRef<[u8]>,
    {
        txn.del(self.db, key, value.map(AsRef::as_ref))
    }

    pub fn iter<'txn, C: lmdb::Cursor<'txn>>(&self, cursor: &'txn mut C) -> lmdb::Iter<'txn> {
        cursor.iter_start()
    }

    pub fn open_ro_cursor<'txn, T: Transaction>(&self, txn: &'txn T) -> lmdb::Result<lmdb::RoCursor<'txn>> {
        txn.open_ro_cursor(self.db)
    }
}
bffhd/db/resources.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
use rkyv::{Archive, Serialize, Deserialize};

use super::{
    DB,
};
use crate::db::{AlignedAdapter, AllocAdapter};
use crate::db::raw::RawDB;
use std::sync::Arc;
use crate::db::{Environment, DatabaseFlags};
use crate::db::Result;

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Archive, Serialize, Deserialize)]
#[derive(serde::Serialize, serde::Deserialize)]
pub struct Resource {
    uuid: u128,
    id: String,
    name_idx: u64,
    description_idx: u64,
}

#[derive(Clone)]
pub struct ResourceDB {
    env: Arc<Environment>,
    db: DB<AllocAdapter<Resource>>,
    id_index: DB<AlignedAdapter<u64>>,
}

impl ResourceDB {
    pub unsafe fn new(env: Arc<Environment>, db: RawDB, id_index: RawDB) -> Self {
        let db = DB::new_unchecked(db);
        let id_index = DB::new_unchecked(id_index);

        Self { env, db, id_index }
    }

    pub unsafe fn open(env: Arc<Environment>) -> Result<Self> {
        let db = RawDB::open(&env, Some("resources"))?;
        let idx = RawDB::open(&env, Some("resources-idx"))?;
        Ok(Self::new(env, db, idx))
    }

    pub unsafe fn create(env: Arc<Environment>) -> Result<Self> {
        let flags = DatabaseFlags::empty();
        let db = RawDB::create(&env, Some("resources"), flags)?;
        let idx = RawDB::create(&env, Some("resources-idx"), flags)?;
        Ok(Self::new(env, db, idx))
    }

    pub fn lookup_id<S: AsRef<str>>(&self, id: S) -> Result<Option<u64>> {
        let txn = self.env.begin_ro_txn()?;
        let id = self.id_index.get(&txn, &id.as_ref().as_bytes()).map(|ok| {
            ok.map(|num| *num)
        })?;
        Ok(id)
    }

    pub fn get_all(&self) -> Result<Vec<(String, u64)>> {
        let txn = self.env.begin_ro_txn()?;
        let mut cursor = self.id_index.open_ro_cursor(&txn)?;
        let iter = cursor.iter_start();
        let mut out = Vec::new();

        for id in iter {
            let (name, id) = id?;
            let name = unsafe { std::str::from_utf8_unchecked(name).to_string() };
            out.push((name, *id));
        }

        Ok(out)
    }
}
bffhd/db/state.rs (new file, 195 lines)
@@ -0,0 +1,195 @@
use std::{
    sync::Arc,
    path::Path,
};

use rkyv::{Archived};

use super::{
    DB,
    Environment,

    EnvironmentFlags,
    DatabaseFlags,
    WriteFlags,

    Adapter,
    AllocAdapter,
    DBError,

    Transaction,
    RoTransaction,
    RwTransaction,

    LMDBorrow,
};

use crate::state::State;

type StateAdapter = AllocAdapter<State>;

/// State Database containing the currently set state
#[derive(Clone, Debug)]
pub struct StateDB {
    /// The environment for all the databases below
    env: Arc<Environment>,

    input: DB<StateAdapter>,
    output: DB<StateAdapter>,

    // TODO: Index resource name/id/uuid -> u64
}

impl StateDB {
    fn open_env<P: AsRef<Path>>(path: P) -> lmdb::Result<Environment> {
        Environment::new()
            .set_flags( EnvironmentFlags::WRITE_MAP
                | EnvironmentFlags::NO_SUB_DIR
                | EnvironmentFlags::NO_TLS
                | EnvironmentFlags::NO_READAHEAD)
            .set_max_dbs(2)
            .open(path.as_ref())
    }

    fn new(env: Environment, input: DB<StateAdapter>, output: DB<StateAdapter>) -> Self {
        Self { env: Arc::new(env), input, output }
    }

    pub fn init<P: AsRef<Path>>(path: P) -> lmdb::Result<Self> {
        let env = Self::open_env(path)?;
        let input = unsafe {
            DB::create(&env, Some("input"), DatabaseFlags::INTEGER_KEY)?
        };
        let output = unsafe {
            DB::create(&env, Some("output"), DatabaseFlags::INTEGER_KEY)?
        };

        Ok(Self::new(env, input, output))
    }

    pub fn open<P: AsRef<Path>>(path: P) -> lmdb::Result<Self> {
        let env = Self::open_env(path)?;
        let input = unsafe { DB::open(&env, Some("input"))? };
        let output = unsafe { DB::open(&env, Some("output"))? };

        Ok(Self::new(env, input, output))
    }

    pub fn create<P: AsRef<Path>>(path: P) -> lmdb::Result<Self> {
        let flags = DatabaseFlags::empty();
        let env = Self::open_env(path)?;
        let input = unsafe { DB::create(&env, Some("input"), flags)? };
        let output = unsafe { DB::create(&env, Some("output"), flags)? };

        Ok(Self::new(env, input, output))
    }

    fn update_txn(&self, txn: &mut RwTransaction, key: u64, input: &State, output: &State)
        -> Result<(), DBError>
    {
        let flags = WriteFlags::empty();
        let k = key.to_ne_bytes();
        self.input.put(txn, &k, input, flags)?;
        self.output.put(txn, &k, output, flags)?;
        Ok(())
    }

    pub fn update(&self, key: u64, input: &State, output: &State)
        -> Result<(), DBError>
    {
        let mut txn = self.env.begin_rw_txn().map_err(StateAdapter::from_db_err)?;
        self.update_txn(&mut txn, key, input, output)?;

        txn.commit().map_err(StateAdapter::from_db_err)
    }

    fn get(&self, db: &DB<StateAdapter>, key: u64)
        -> Result<Option<LMDBorrow<RoTransaction, Archived<State>>>, DBError>
    {
        let txn = self.env.begin_ro_txn().map_err(StateAdapter::from_db_err)?;
        if let Some(state) = db.get(&txn, &key.to_ne_bytes())? {
            let ptr = state.into();
            Ok(Some(unsafe { LMDBorrow::new(ptr, txn) }))
        } else {
            Ok(None)
        }
    }

    #[inline(always)]
    pub fn get_input(&self, key: u64)
        -> Result<Option<LMDBorrow<RoTransaction, Archived<State>>>, DBError>
    { self.get(&self.input, key) }

    #[inline(always)]
    pub fn get_output(&self, key: u64)
        -> Result<Option<LMDBorrow<RoTransaction, Archived<State>>>, DBError>
    { self.get(&self.output, key) }

    pub fn accessor(&self, key: u64) -> StateAccessor {
        StateAccessor::new(key, self.clone())
    }
}

#[derive(Debug)]
pub struct StateAccessor {
    key: u64,
    db: StateDB
}

impl StateAccessor {
    pub fn new(key: u64, db: StateDB) -> Self {
        Self { key, db }
    }

    pub fn get_input(&self)
        -> Result<Option<LMDBorrow<RoTransaction, Archived<State>>>, DBError>
    {
        self.db.get_input(self.key)
    }

    pub fn get_output(&self)
        -> Result<Option<LMDBorrow<RoTransaction, Archived<State>>>, DBError>
    {
        self.db.get_output(self.key)
    }

    pub fn set(&self, input: &State, output: &State) -> Result<(), DBError> {
        self.db.update(self.key, input, output)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    use crate::state::value::Vec3u8;
    use crate::state::value::{OID_COLOUR, OID_POWERED, OID_INTENSITY};
    use std::ops::Deref;

    #[test]
    fn construct_state() {
        let tmpdir = tempfile::tempdir().unwrap();
        let mut tmppath = tmpdir.path().to_owned();
        tmppath.push("db");
        let db = StateDB::init(tmppath).unwrap();
        let b = State::build()
            .add(OID_COLOUR.clone(), Box::new(Vec3u8 { a: 1, b: 2, c: 3}))
            .add(OID_POWERED.clone(), Box::new(true))
            .add(OID_INTENSITY.clone(), Box::new(1023))
            .finish();
        println!("({}) {:?}", b.hash(), b);

        let c = State::build()
            .add(OID_COLOUR.clone(), Box::new(Vec3u8 { a: 1, b: 2, c: 3}))
            .add(OID_POWERED.clone(), Box::new(true))
            .add(OID_INTENSITY.clone(), Box::new(1023))
            .finish();

        let key = rand::random();
        db.update(key, &b, &c).unwrap();
        let d = db.get_input(key).unwrap().unwrap();
        let e = db.get_output(key).unwrap().unwrap();
        assert_eq!(&b, d.deref());
        assert_eq!(&c, e.deref());
    }
}
bffhd/db/typed.rs (new file, 242 lines)
@@ -0,0 +1,242 @@
use std::{
    fmt,
    any::type_name,
    marker::PhantomData,
};

use rkyv::{
    Archived,
    archived_root,

    Serialize,

    ser::{
        Serializer,
        serializers::AllocSerializer,
    },

    util::AlignedVec,

    Fallible,
};

use lmdb::{
    Environment,
    DatabaseFlags,
    WriteFlags,

    Transaction,
    RwTransaction,
};

use super::RawDB;

pub trait Adapter: Fallible {
    type Serializer: rkyv::ser::Serializer;
    type Value: Serialize<Self::Serializer>;

    fn new_serializer() -> Self::Serializer;

    fn from_ser_err(e: <Self::Serializer as Fallible>::Error) -> <Self as Fallible>::Error;
    fn from_db_err(e: lmdb::Error) -> <Self as Fallible>::Error;
}

struct AdapterPrettyPrinter<A: Adapter>(PhantomData<A>);

impl<A: Adapter> AdapterPrettyPrinter<A> {
    pub fn new() -> Self { Self(PhantomData) }
}

impl<A: Adapter> fmt::Debug for AdapterPrettyPrinter<A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct(&type_name::<A>())
            .field("serializer", &type_name::<A::Serializer>())
            .field("value", &type_name::<A::Value>())
            .finish()
    }
}

pub trait OutputBuffer {
    type Buffer: AsRef<[u8]>;
    fn into_slice(self) -> Self::Buffer;
}

impl<const N: usize> OutputBuffer for AllocSerializer<N> {
    type Buffer = AlignedVec;
    fn into_slice(self) -> Self::Buffer {
        self.into_serializer().into_inner()
    }
}

// TODO: This should be possible to autoimplement for Sized Serializers
pub trait OutputWriter: Fallible {
    fn write_into(&mut self, buf: &mut [u8]) -> Result<(), Self::Error>;
}

pub struct DB<A> {
    db: RawDB,
    phantom: PhantomData<A>,
}
impl<A> Clone for DB<A> {
    fn clone(&self) -> Self {
        Self {
            db: self.db.clone(),
            phantom: PhantomData,
        }
    }
}
impl<A: Adapter> fmt::Debug for DB<A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DB")
            .field("db", &self.db)
            .field("adapter", &AdapterPrettyPrinter::<A>::new())
            .finish()
    }
}

impl<A> DB<A> {
    pub unsafe fn new_unchecked(db: RawDB) -> Self {
        Self { db, phantom: PhantomData }
    }

    fn new(db: RawDB) -> Self {
        unsafe { Self::new_unchecked(db) }
    }

    /// Open the underlying DB, creating it if necessary
    ///
    /// This function is unsafe since if the DB does not contain `A::Archived` we may end up doing
    /// random memory reads or writes
    pub unsafe fn create(env: &Environment, name: Option<&str>, flags: DatabaseFlags)
        -> lmdb::Result<Self>
    {
        RawDB::create(env, name, flags).map(Self::new)
    }

    /// Open the underlying DB
    ///
    /// This function is unsafe since if the DB does not contain `A::Archived` we may end up doing
    /// random memory reads or writes
    pub unsafe fn open(env: &Environment, name: Option<&str>) -> lmdb::Result<Self> {
        RawDB::open(env, name).map(Self::new)
    }
}

impl<A: Adapter> DB<A>
{
    pub fn del<K: AsRef<[u8]>>(&self, txn: &mut RwTransaction, key: &K) -> Result<(), A::Error> {
        let v: Option<&Vec<u8>> = None;
        self.db.del(txn, key, v).map_err(A::from_db_err)
    }
}

impl<A: Adapter> DB<A>
{
    pub fn get<'txn, T: Transaction, K: AsRef<[u8]>>(&self, txn: &'txn T, key: &K)
        -> Result<Option<&'txn Archived<A::Value>>, A::Error>
    {
        if let Some(buf) = self.db.get(txn, key).map_err(A::from_db_err)? {
            Ok(Some(unsafe { archived_root::<A::Value>(buf.as_ref()) }))
        } else {
            Ok(None)
        }
    }

    pub fn open_ro_cursor<'txn, T: Transaction>(&self, txn: &'txn T)
        -> Result<TypedCursor<lmdb::RoCursor<'txn>, A>, A::Error>
    {
        let c = self.db.open_ro_cursor(txn)
            .map_err(A::from_db_err)?;
        // Safe because we are providing both Adapter and cursor and know it matches
        Ok(unsafe { TypedCursor::new(c) })
    }
}

impl<'a, A> DB<A>
    where A: Adapter,
          A::Serializer: OutputBuffer,
{
    pub fn put<K: AsRef<[u8]>>(&self, txn: &mut RwTransaction, key: &K, val: &A::Value, flags: WriteFlags)
        -> Result<usize, A::Error>
    {
        let mut serializer = A::new_serializer();
        let pos = serializer.serialize_value(val)
            .map_err(A::from_ser_err)?;

        let buf = serializer.into_slice();
        self.db.put(txn, key, &buf, flags)
            .map_err(A::from_db_err)?;

        Ok(pos)
    }
}

impl<'a, A> DB<A>
    where A: Adapter,
          A::Serializer: OutputWriter,
{
    pub fn put_nocopy<K: AsRef<[u8]>>(&self, txn: &mut RwTransaction, key: &K, val: &A::Value, flags: WriteFlags)
        -> Result<usize, A::Error>
    {
        let mut serializer = A::new_serializer();
        let pos = serializer.serialize_value(val)
            .map_err(A::from_ser_err)?;

        let mut buf = self.db.reserve(txn, &key.as_ref(), pos, flags)
            .map_err(A::from_db_err)?;
        serializer.write_into(&mut buf)
            .map_err(A::from_ser_err)?;

        Ok(pos)
    }
}

pub struct TypedCursor<C, A> {
    cursor: C,
    phantom: PhantomData<A>,
}

impl<'txn, C, A> TypedCursor<C, A>
    where C: lmdb::Cursor<'txn>,
          A: Adapter,
{
    // Unsafe because we don't know if the given adapter matches the given cursor
    pub unsafe fn new(cursor: C) -> Self {
        Self { cursor, phantom: PhantomData }
    }

    pub fn iter_start(&mut self) -> Iter<'txn, A> {
        let iter = self.cursor.iter_start();
        // Safe because `new` isn't :P
        unsafe { Iter::new(iter) }
    }

    pub fn iter_dup_of<K: AsRef<[u8]>>(&mut self, key: &K) -> Iter<'txn, A> {
        let iter = self.cursor.iter_dup_of(key);
        // Safe because `new` isn't :P
        unsafe { Iter::new(iter) }
    }
}

pub struct Iter<'txn, A> {
    iter: lmdb::Iter<'txn>,
    phantom: PhantomData<A>,
}

impl<'txn, A: Adapter> Iter<'txn, A> {
    pub unsafe fn new(iter: lmdb::Iter<'txn>) -> Self {
        Self { iter, phantom: PhantomData }
    }
}

impl<'txn, A: Adapter> Iterator for Iter<'txn, A>
    where Archived<A::Value>: 'txn
{
    type Item = Result<(&'txn [u8], &'txn Archived<A::Value>), A::Error>;

    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|r| r
            .map_err(A::from_db_err)
            .map(|(key, buf)| { (key, unsafe { archived_root::<A::Value>(buf) }) }))
    }
}
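An illustrative sketch (not part of this commit) of what a concrete `Adapter` implementation looks like; it essentially restates the `AllocAdapter` defined in bffhd/db.rs for `String` values and assumes it sits inside the `db` module where `Adapter` and `DBError` are in scope.

    use rkyv::Fallible;
    use rkyv::ser::serializers::AllocSerializer;

    // Hypothetical adapter: values are Strings, serialized with an allocating rkyv serializer.
    struct StringAdapter;

    impl Fallible for StringAdapter {
        type Error = DBError;
    }

    impl Adapter for StringAdapter {
        type Serializer = AllocSerializer<1024>;
        type Value = String;

        fn new_serializer() -> Self::Serializer {
            Self::Serializer::default()
        }
        fn from_ser_err(e: <Self::Serializer as Fallible>::Error) -> Self::Error {
            DBError::RKYV(e)
        }
        fn from_db_err(e: lmdb::Error) -> Self::Error {
            DBError::LMDB(e)
        }
    }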
bffhd/db/user.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
use std::sync::Arc;
use super::{DB, AllocAdapter, Environment, Result};
use crate::db::raw::RawDB;
use crate::db::{DatabaseFlags, LMDBorrow, RoTransaction, WriteFlags, };

use rkyv::{Archive, Serialize, Deserialize, Archived};

type Adapter = AllocAdapter<User>;
#[derive(Clone)]
pub struct UserDB {
    env: Arc<Environment>,
    db: DB<Adapter>,
}

#[derive(Debug, Clone, Archive, Serialize, Deserialize, serde::Serialize, serde::Deserialize)]
pub struct User {
    id: u128,
    username: String,
    roles: Vec<String>,
}

impl UserDB {
    pub unsafe fn new(env: Arc<Environment>, db: RawDB) -> Self {
        let db = DB::new_unchecked(db);
        Self { env, db }
    }

    pub unsafe fn open(env: Arc<Environment>) -> Result<Self> {
        let db = RawDB::open(&env, Some("user"))?;
        Ok(Self::new(env, db))
    }

    pub unsafe fn create(env: Arc<Environment>) -> Result<Self> {
        let flags = DatabaseFlags::empty();
        let db = RawDB::create(&env, Some("user"), flags)?;
        Ok(Self::new(env, db))
    }

    pub fn get(&self, uid: &str) -> Result<Option<LMDBorrow<RoTransaction, Archived<User>>>> {
        let txn = self.env.begin_ro_txn()?;
        if let Some(state) = self.db.get(&txn, &uid.as_bytes())? {
            let ptr = state.into();
            Ok(Some(unsafe { LMDBorrow::new(ptr, txn) }))
        } else {
            Ok(None)
        }
    }

    pub fn put(&self, uid: &str, user: &User) -> Result<()> {
        let mut txn = self.env.begin_rw_txn()?;
        let flags = WriteFlags::empty();
        self.db.put(&mut txn, &uid.as_bytes(), user, flags)?;
        Ok(())
    }

    pub fn get_all(&self) -> Result<Vec<(String, User)>> {
        let txn = self.env.begin_ro_txn()?;
        let mut cursor = self.db.open_ro_cursor(&txn)?;
        let iter = cursor.iter_start();
        let mut out = Vec::new();
        let mut deserializer = rkyv::Infallible;
        for user in iter {
            let (uid, user) = user?;
            let uid = unsafe { std::str::from_utf8_unchecked(uid).to_string() };
            let user: User = user.deserialize(&mut deserializer).unwrap();
            out.push((uid, user));
        }

        Ok(out)
    }
}
bffhd/error.rs (new file, 86 lines)
@@ -0,0 +1,86 @@
use std::io;
use std::fmt;
use serde_dhall;

use rsasl::SaslError;

use crate::db::DBError;

//FIXME use crate::network;

#[derive(Debug)]
pub enum Error {
    Dhall(serde_dhall::Error),
    SASL(SaslError),
    IO(io::Error),
    Boxed(Box<dyn std::error::Error>),
    Capnp(capnp::Error),
    DB(DBError),
    Denied,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::Dhall(e) => {
                write!(f, "Dhall coding error: {}", e)
            },
            Error::SASL(e) => {
                write!(f, "SASL Error: {}", e)
            },
            Error::IO(e) => {
                write!(f, "IO Error: {}", e)
            },
            Error::Boxed(e) => {
                write!(f, "{}", e)
            },
            Error::Capnp(e) => {
                write!(f, "Cap'n Proto Error: {}", e)
            },
            Error::DB(e) => {
                write!(f, "DB Error: {:?}", e)
            },
            Error::Denied => {
                write!(f, "You do not have the permission required to do that.")
            }
        }
    }
}

impl From<SaslError> for Error {
    fn from(e: SaslError) -> Error {
        Error::SASL(e)
    }
}

impl From<io::Error> for Error {
    fn from(e: io::Error) -> Error {
        Error::IO(e)
    }
}

impl From<serde_dhall::Error> for Error {
    fn from(e: serde_dhall::Error) -> Error {
        Error::Dhall(e)
    }
}

impl From<Box<dyn std::error::Error>> for Error {
    fn from(e: Box<dyn std::error::Error>) -> Error {
        Error::Boxed(e)
    }
}

impl From<capnp::Error> for Error {
    fn from(e: capnp::Error) -> Error {
        Error::Capnp(e)
    }
}

impl From<DBError> for Error {
    fn from(e: DBError) -> Error {
        Error::DB(e)
    }
}

pub(crate) type Result<T> = std::result::Result<T, Error>;
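An illustrative sketch (not part of this commit): the `From` impls above are what let call sites mix the different error sources behind a single `?`, e.g. a hypothetical crate-internal start-up path that reads the config and then opens the databases.

    // Hypothetical helper; both error types convert into `Error` through `?`.
    fn open_from_config(cfg_path: &std::path::Path) -> crate::error::Result<()> {
        let cfg = crate::config::read(cfg_path)?;              // serde_dhall::Error -> Error::Dhall
        let _dbs = crate::db::Databases::open(&cfg.db_path)?;  // DBError -> Error::DB
        Ok(())
    }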
bffhd/lib.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
#![forbid(unused_imports)]

pub mod config;
pub mod db;
pub mod error;
pub mod network;
pub mod oid;
pub mod permissions;
pub mod resource;
pub mod schema;
pub mod state;
pub mod varint;

use intmap::IntMap;
use resource::ResourceDriver;

#[derive(Debug)]
struct InitiatorDriver;
#[derive(Debug)]
struct ActorDriver;

#[derive(Debug)]
struct System {
    resources: IntMap<ResourceDriver>,
    initiators: IntMap<InitiatorDriver>,
    actors: IntMap<ActorDriver>,
}

#[derive(Debug)]
struct Accountant {

}
bffhd/network.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use std::{
    sync::Arc,
    collections::HashMap,
};

use futures_signals::signal::{
    Mutable,
    MutableSignalCloned
};

use crate::state::State;

type ResourceState = Mutable<Arc<State>>;
type ResourceStateSignal = MutableSignalCloned<Arc<State>>;

/// Connection Broker between Resources and Subscribers
///
/// This serves as touch-off point between resources and anybody else. It doesn't drive
/// any state updates, it only allows subscribers to subscribe to the resources that are
/// driving the state updates
pub struct Network {
    sources: HashMap<u64, ResourceState>,
}
894
bffhd/oid.rs
Normal file
894
bffhd/oid.rs
Normal file
@ -0,0 +1,894 @@
|
||||
//! oid crate by https://github.com/UnnecessaryEngineering/oid turned into vendore'd module
|
||||
//!
|
||||
//! [Object Identifiers] are a standard of the [ITU] used to reference objects, things, and
|
||||
//! concepts in a globally unique way. This crate provides for data structures and methods
|
||||
//! to build, parse, and format OIDs.
|
||||
//!
|
||||
//!
|
||||
//! ## Parsing OID String Representation
|
||||
//! ```ignore
|
||||
//! use crate::oid::prelude::*;
|
||||
//!
|
||||
//! fn main() -> Result<(), ObjectIdentifierError> {
|
||||
//! let oid = ObjectIdentifier::try_from("0.1.2.3")?;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! ## Parsing OID Binary Representation
|
||||
//! ```ignore
|
||||
//! use prelude::*;
|
||||
//!
|
||||
//! fn main() -> Result<(), ObjectIdentifierError> {
|
||||
//! let oid = ObjectIdentifier::try_from(vec![0x00, 0x01, 0x02, 0x03])?;
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! ## Encoding OID as String Representation
|
||||
//! ```ignore
|
||||
//! use prelude::*;
|
||||
//!
|
||||
//! fn main() -> Result<(), ObjectIdentifierError> {
|
||||
//! let oid = ObjectIdentifier::try_from("0.1.2.3")?;
|
||||
//! let oid: String = oid.into();
|
||||
//! assert_eq!(oid, "0.1.2.3");
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! ## Encoding OID as Binary Representation
|
||||
//! ```ignore
|
||||
//! use oid::prelude::*;
|
||||
//!
|
||||
//! fn main() -> Result<(), ObjectIdentifierError> {
|
||||
//! let oid = ObjectIdentifier::try_from(vec![0x00, 0x01, 0x02, 0x03])?;
|
||||
//! let oid: Vec<u8> = oid.into();
|
||||
//! assert_eq!(oid, vec![0x00, 0x01, 0x02, 0x03]);
|
||||
//! Ok(())
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! [Object Identifiers]: https://en.wikipedia.org/wiki/Object_identifier
|
||||
//! [ITU]: https://en.wikipedia.org/wiki/International_Telecommunications_Union
|
||||
|
||||
use core::convert::{TryFrom };
|
||||
|
||||
use rkyv::{Archive, Serialize};
|
||||
use rkyv::vec::{ArchivedVec, VecResolver};
|
||||
use std::ops::Deref;
|
||||
use std::fmt;
|
||||
use std::fmt::Formatter;
|
||||
use rkyv::ser::Serializer;
|
||||
use std::str::FromStr;
|
||||
use crate::varint::VarU128;
|
||||
use std::convert::TryInto;
|
||||
|
||||
type Node = u128;
|
||||
type VarNode = VarU128;
|
||||
|
||||
/// Convenience module for quickly importing the public interface (e.g., `use oid::prelude::*`)
|
||||
pub mod prelude {
|
||||
pub use super::ObjectIdentifier;
|
||||
pub use super::ObjectIdentifierRoot::*;
|
||||
pub use super::ObjectIdentifierError;
|
||||
pub use core::convert::{TryFrom, TryInto};
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
|
||||
#[repr(u8)]
|
||||
pub enum ObjectIdentifierRoot {
|
||||
ItuT = 0,
|
||||
Iso = 1,
|
||||
JointIsoItuT = 2,
|
||||
}
|
||||
|
||||
impl Into<String> for ObjectIdentifierRoot {
|
||||
fn into(self) -> String {
|
||||
format!("{}", self as u8)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<u8> for ObjectIdentifierRoot {
|
||||
type Error = ObjectIdentifierError;
|
||||
fn try_from(value: u8) -> Result<ObjectIdentifierRoot, Self::Error> {
|
||||
match value {
|
||||
0 => Ok(ObjectIdentifierRoot::ItuT),
|
||||
1 => Ok(ObjectIdentifierRoot::Iso),
|
||||
2 => Ok(ObjectIdentifierRoot::JointIsoItuT),
|
||||
_ => Err(ObjectIdentifierError::IllegalRootNode),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Object Identifier Errors
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
|
||||
pub enum ObjectIdentifierError {
|
||||
/// Failed to parse OID due to illegal root node (must be 0-2 decimal)
|
||||
IllegalRootNode,
|
||||
/// Failed to parse OID due to illegal first node (must be 0-39 decimal)
|
||||
IllegalFirstChildNode,
|
||||
/// Failed to parse OID due to illegal child node value (except first node)
|
||||
IllegalChildNodeValue,
|
||||
}
|
||||
|
||||
/// Object Identifier (OID)
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct ObjectIdentifier {
|
||||
nodes: Box<[u8]>,
|
||||
}
|
||||
|
||||
impl ObjectIdentifier {
|
||||
#[inline(always)]
|
||||
pub const fn new_unchecked(nodes: Box<[u8]>) -> Self {
|
||||
Self { nodes }
|
||||
}
|
||||
pub fn from_box(nodes: Box<[u8]>) -> Result<Self, ObjectIdentifierError> {
|
||||
if nodes.len() < 1 {
|
||||
return Err(ObjectIdentifierError::IllegalRootNode);
|
||||
};
|
||||
ObjectIdentifierRoot::try_from(nodes[0] / 40)?;
|
||||
|
||||
let mut parsing_big_int = false;
|
||||
let mut big_int: Node = 0;
|
||||
for i in 1..nodes.len() {
|
||||
if !parsing_big_int && nodes[i] < 128 {} else {
|
||||
if big_int > 0 {
|
||||
if big_int >= Node::MAX >> 7 {
|
||||
return Err(ObjectIdentifierError::IllegalChildNodeValue);
|
||||
}
|
||||
big_int <<= 7;
|
||||
};
|
||||
big_int |= (nodes[i] & !0x80) as Node;
|
||||
parsing_big_int = nodes[i] & 0x80 != 0;
|
||||
}
|
||||
if big_int > 0 && !parsing_big_int {
|
||||
big_int = 0;
|
||||
}
|
||||
}
|
||||
Ok(Self { nodes })
|
||||
}
|
||||
|
||||
pub fn build<B: AsRef<[Node]>>(root: ObjectIdentifierRoot, first: u8, children: B)
|
||||
-> Result<Self, ObjectIdentifierError>
|
||||
{
|
||||
if first > 40 {
|
||||
return Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
}
|
||||
|
||||
let children = children.as_ref();
|
||||
let mut vec = Vec::with_capacity(children.len() + 1);
|
||||
vec.push((root as u8) * 40 + first);
|
||||
for child in children {
|
||||
let var: VarNode = child.into();
|
||||
vec.extend_from_slice(var.as_bytes())
|
||||
}
|
||||
Ok(Self { nodes: vec.into_boxed_slice() })
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn root(&self) -> Result<ObjectIdentifierRoot, ObjectIdentifierError> {
|
||||
ObjectIdentifierRoot::try_from(self.nodes[0] / 40)
|
||||
}
|
||||
#[inline(always)]
|
||||
pub const fn first_node(&self) -> u8 {
|
||||
self.nodes[0] % 40
|
||||
}
|
||||
#[inline(always)]
|
||||
pub fn child_nodes(&self) -> &[u8] {
|
||||
&self.nodes[1..]
|
||||
}
|
||||
#[inline(always)]
|
||||
pub const fn as_bytes(&self) -> &[u8] {
|
||||
&self.nodes
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ObjectIdentifier {
|
||||
type Target = [u8];
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.nodes
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ObjectIdentifier {
|
||||
type Err = ObjectIdentifierError;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let mut nodes = value.split(".");
|
||||
let root = nodes.next()
|
||||
.and_then(|n| n.parse::<u8>().ok())
|
||||
.and_then(|n| n.try_into().ok())
|
||||
.ok_or(ObjectIdentifierError::IllegalRootNode)?;
|
||||
|
||||
let first = nodes.next()
|
||||
.and_then(|n| parse_string_first_node(n).ok())
|
||||
.ok_or(ObjectIdentifierError::IllegalFirstChildNode)?;
|
||||
|
||||
let mut children = if let (_, Some(hint)) = nodes.size_hint() {
|
||||
Vec::with_capacity(hint)
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
for child in nodes.map(|n| n.parse().ok()) {
|
||||
if let Some(c) = child {
|
||||
children.push(c);
|
||||
} else {
|
||||
return Err(ObjectIdentifierError::IllegalChildNodeValue);
|
||||
}
|
||||
}
|
||||
|
||||
ObjectIdentifier::build(root, first, children)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ObjectIdentifier {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let show: String = self.into();
|
||||
write!(f, "{}", show)
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for ObjectIdentifier {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let show: String = self.into();
|
||||
write!(f, "{}", show)
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct ArchivedObjectIdentifier {
|
||||
archived: ArchivedVec<u8>
|
||||
}
|
||||
|
||||
impl Deref for ArchivedObjectIdentifier {
|
||||
type Target = [u8];
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.archived.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for ArchivedObjectIdentifier {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", &convert_to_string(self.archived.as_slice())
|
||||
.unwrap_or_else(|e| format!("Invalid OID: {:?}", e)))
|
||||
}
|
||||
}
|
||||
|
||||
impl Archive for ObjectIdentifier {
|
||||
type Archived = ArchivedObjectIdentifier;
|
||||
type Resolver = VecResolver;
|
||||
|
||||
unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
|
||||
let (oid_pos, oid_out) = rkyv::out_field!(out.archived);
|
||||
ArchivedVec::resolve_from_slice(self.nodes.as_ref(), pos + oid_pos, resolver, oid_out);
|
||||
}
|
||||
}
|
||||
impl Archive for &'static ObjectIdentifier {
|
||||
type Archived = ArchivedObjectIdentifier;
|
||||
type Resolver = VecResolver;
|
||||
|
||||
unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
|
||||
let (oid_pos, oid_out) = rkyv::out_field!(out.archived);
|
||||
ArchivedVec::resolve_from_slice(self.nodes.as_ref(), pos + oid_pos, resolver, oid_out);
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Serializer + ?Sized> Serialize<S> for ObjectIdentifier
|
||||
where [u8]: rkyv::SerializeUnsized<S>
|
||||
{
|
||||
fn serialize(&self, serializer: &mut S) -> Result<Self::Resolver, S::Error> {
|
||||
ArchivedVec::serialize_from_slice(self.nodes.as_ref(), serializer)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_string_first_node(first_child_node: &str) -> Result<u8, ObjectIdentifierError> {
|
||||
let first_child_node: u8 = first_child_node
|
||||
.parse()
|
||||
.map_err(|_| ObjectIdentifierError::IllegalFirstChildNode)?;
|
||||
if first_child_node > 39 {
|
||||
return Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
}
|
||||
Ok(first_child_node)
|
||||
}
|
||||
|
||||
fn parse_string_child_node(
|
||||
node_str: &str,
|
||||
out: &mut Vec<u8>
|
||||
) -> Result<(), ObjectIdentifierError> {
|
||||
let node: Node = node_str.parse()
|
||||
.map_err(|_| ObjectIdentifierError::IllegalChildNodeValue)?;
|
||||
// TODO bench against !*node &= 0x80, compiler may already optimize better
|
||||
if node <= 127 {
|
||||
out.push(node as u8);
|
||||
} else {
|
||||
let vi: VarNode = node.into();
|
||||
out.extend_from_slice(vi.as_bytes());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl ObjectIdentifier {
|
||||
fn from_string<S>(value: S) -> Result<ObjectIdentifier, ObjectIdentifierError>
|
||||
where
|
||||
S: AsRef<str>,
|
||||
{
|
||||
ObjectIdentifier::from_str(value.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_to_string(nodes: &[u8]) -> Result<String, ObjectIdentifierError> {
|
||||
assert!(nodes.len() > 0);
|
||||
let root = nodes[0] / 40;
|
||||
let mut out = root.to_string();
|
||||
|
||||
out.push('.');
|
||||
let first = nodes[0] % 40;
|
||||
out.extend(first.to_string().chars());
|
||||
|
||||
let mut parsing_big_int = false;
|
||||
let mut big_int: Node = 0;
|
||||
for i in 1..nodes.len() {
|
||||
if !parsing_big_int && nodes[i] < 128 {
|
||||
// less than 7 bits of node value
|
||||
out.push('.');
|
||||
let nr = nodes[i].to_string();
|
||||
out.extend(nr.chars());
|
||||
} else {
|
||||
if big_int > 0 {
|
||||
if big_int >= Node::MAX >> 7 {
|
||||
return Err(ObjectIdentifierError::IllegalChildNodeValue);
|
||||
}
|
||||
big_int <<= 7;
|
||||
};
|
||||
big_int += (nodes[i] & !0x80) as Node;
|
||||
parsing_big_int = nodes[i] & 0x80 != 0;
|
||||
}
|
||||
if big_int > 0 && !parsing_big_int {
|
||||
out.push('.');
|
||||
out.extend(big_int.to_string().chars());
|
||||
big_int = 0;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
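// A short decoding sketch for the loop above (bytes taken from the tests further
// below): child nodes >= 128 are stored base-128, most significant group first,
// with the high bit of each byte acting as a continuation flag. The two bytes
// [0x93, 0x45] therefore decode to ((0x93 & 0x7F) << 7) | 0x45 = (19 << 7) + 69 = 2501.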
|
||||
|
||||
impl Into<String> for &ObjectIdentifier {
|
||||
fn into(self) -> String {
|
||||
convert_to_string(&self.nodes)
|
||||
.expect("Valid OID object couldn't be serialized.")
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<String> for ObjectIdentifier {
|
||||
fn into(self) -> String {
|
||||
(&self).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Into<&'a [u8]> for &'a ObjectIdentifier {
|
||||
fn into(self) -> &'a [u8] {
|
||||
&self.nodes
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Vec<u8>> for ObjectIdentifier {
|
||||
fn into(self) -> Vec<u8> {
|
||||
self.nodes.into_vec()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for ObjectIdentifier {
|
||||
type Error = ObjectIdentifierError;
|
||||
fn try_from(value: &str) -> Result<ObjectIdentifier, Self::Error> {
|
||||
ObjectIdentifier::from_string(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for ObjectIdentifier {
|
||||
type Error = ObjectIdentifierError;
|
||||
fn try_from(value: String) -> Result<ObjectIdentifier, Self::Error> {
|
||||
ObjectIdentifier::from_string(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&[u8]> for ObjectIdentifier {
|
||||
type Error = ObjectIdentifierError;
|
||||
fn try_from(nodes: &[u8]) -> Result<ObjectIdentifier, Self::Error> {
|
||||
Self::from_box(nodes.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<Vec<u8>> for ObjectIdentifier {
|
||||
type Error = ObjectIdentifierError;
|
||||
fn try_from(value: Vec<u8>) -> Result<ObjectIdentifier, Self::Error> {
|
||||
Self::from_box(value.into_boxed_slice())
|
||||
}
|
||||
}
|
||||
|
||||
mod serde_support {
|
||||
use super::*;
|
||||
use core::fmt;
|
||||
use serde::{de, ser};
|
||||
|
||||
struct OidVisitor;
|
||||
|
||||
impl<'de> de::Visitor<'de> for OidVisitor {
|
||||
type Value = ObjectIdentifier;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("a valid buffer representing an OID")
|
||||
}
|
||||
|
||||
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
ObjectIdentifier::try_from(v).map_err(|err| {
|
||||
E::invalid_value(
|
||||
de::Unexpected::Other(match err {
|
||||
ObjectIdentifierError::IllegalRootNode => "illegal root node",
|
||||
ObjectIdentifierError::IllegalFirstChildNode => "illegal first child node",
|
||||
ObjectIdentifierError::IllegalChildNodeValue => "illegal child node value",
|
||||
}),
|
||||
&"a valid buffer representing an OID",
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
ObjectIdentifier::try_from(v).map_err(|err| {
|
||||
E::invalid_value(
|
||||
de::Unexpected::Other(match err {
|
||||
ObjectIdentifierError::IllegalRootNode => "illegal root node",
|
||||
ObjectIdentifierError::IllegalFirstChildNode => "illegal first child node",
|
||||
ObjectIdentifierError::IllegalChildNodeValue => "illegal child node value",
|
||||
}),
|
||||
&"a string representing an OID",
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> de::Deserialize<'de> for ObjectIdentifier {
|
||||
fn deserialize<D>(deserializer: D) -> Result<ObjectIdentifier, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
if deserializer.is_human_readable() {
|
||||
deserializer.deserialize_str(OidVisitor)
|
||||
} else {
|
||||
deserializer.deserialize_bytes(OidVisitor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Serialize for ObjectIdentifier {
|
||||
fn serialize<S>(
|
||||
&self,
|
||||
serializer: S,
|
||||
) -> Result<<S as ser::Serializer>::Ok, <S as ser::Serializer>::Error>
|
||||
where
|
||||
S: ser::Serializer,
|
||||
{
|
||||
if serializer.is_human_readable() {
|
||||
let encoded: String = self.into();
|
||||
serializer.serialize_str(&encoded)
|
||||
} else {
|
||||
serializer.serialize_bytes(self.as_bytes())
|
||||
}
|
||||
}
|
||||
}
|
||||
impl ser::Serialize for ArchivedObjectIdentifier {
|
||||
fn serialize<S>(
|
||||
&self,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: ser::Serializer,
|
||||
{
|
||||
if serializer.is_human_readable() {
|
||||
let encoded: String = convert_to_string(self.deref())
|
||||
.expect("Failed to convert valid OID to String");
|
||||
serializer.serialize_str(&encoded)
|
||||
} else {
|
||||
serializer.serialize_bytes(self.deref())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use super::*;
|
||||
use std::convert::TryInto;
|
||||
|
||||
pub(crate) fn gen_random() -> ObjectIdentifier {
|
||||
let amt: u8 = rand::random::<u8>() % 10 + 1;
|
||||
let mut children = Vec::new();
|
||||
for _ in 0..amt {
|
||||
children.push(rand::random());
|
||||
}
|
||||
|
||||
ObjectIdentifier::build(ObjectIdentifierRoot::JointIsoItuT, 25, children)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bincode_serde_roundtrip() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x01,
|
||||
vec![1, 2, 3, 5, 8, 13, 21],
|
||||
).unwrap();
|
||||
let buffer: Vec<u8> = bincode::serialize(&expected).unwrap();
|
||||
let actual = bincode::deserialize(&buffer).unwrap();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_binary_root_node_0() {
|
||||
let expected: Vec<u8> = vec![0];
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: Vec<u8> = oid.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_binary_root_node_1() {
|
||||
let expected: Vec<u8> = vec![40];
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: Vec<u8> = oid.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_binary_root_node_2() {
|
||||
let expected: Vec<u8> = vec![80];
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: Vec<u8> = oid.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_binary_example_1() {
|
||||
let expected: Vec<u8> = vec![0x01, 0x01, 0x02, 0x03, 0x05, 0x08, 0x0D, 0x15];
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x01,
|
||||
vec![1, 2, 3, 5, 8, 13, 21],
|
||||
).unwrap();
|
||||
let actual: Vec<u8> = oid.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_binary_example_2() {
|
||||
let expected: Vec<u8> = vec![
|
||||
0x77, 0x2A, 0x93, 0x45, 0x83, 0xFF, 0x7F, 0x87, 0xFF, 0xFF, 0xFF, 0x7F, 0x89, 0x53,
|
||||
0x92, 0x30,
|
||||
];
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
39,
|
||||
vec![42, 2501, 65535, 2147483647, 1235, 2352],
|
||||
).unwrap();
|
||||
let actual: Vec<u8> = (oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_string_root_node_0() {
|
||||
let expected = "0.0";
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: String = (oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_string_root_node_1() {
|
||||
let expected = "1.0";
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: String = (&oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_string_root_node_2() {
|
||||
let expected = "2.0";
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
).unwrap();
|
||||
let actual: String = (&oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_string_example_1() {
|
||||
let expected = "0.1.1.2.3.5.8.13.21";
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x01,
|
||||
vec![1, 2, 3, 5, 8, 13, 21],
|
||||
).unwrap();
|
||||
let actual: String = (&oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_string_example_2() {
|
||||
let expected = "2.39.42.2501.65535.2147483647.1235.2352";
|
||||
let oid = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
39,
|
||||
vec![42, 2501, 65535, 2147483647, 1235, 2352],
|
||||
).unwrap();
|
||||
let actual: String = (&oid).into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_root_node_0() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = vec![0x00].try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_root_node_1() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = vec![40].try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_root_node_2() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = vec![80].try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_example_1() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x01,
|
||||
vec![1, 2, 3, 5, 8, 13, 21],
|
||||
);
|
||||
let actual = vec![0x01, 0x01, 0x02, 0x03, 0x05, 0x08, 0x0D, 0x15].try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_example_2() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
39,
|
||||
vec![42, 2501, 65535, 2147483647, 1235, 2352],
|
||||
);
|
||||
let actual = vec![
|
||||
0x77, 0x2A, 0x93, 0x45, 0x83, 0xFF, 0x7F, 0x87, 0xFF, 0xFF, 0xFF, 0x7F, 0x89, 0x53,
|
||||
0x92, 0x30,
|
||||
]
|
||||
.try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_root_node_0() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = "0.0".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_root_node_1() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = "1.0".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_root_node_2() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
0x00,
|
||||
vec![],
|
||||
);
|
||||
let actual = "2.0".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_example_1() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::ItuT,
|
||||
0x01,
|
||||
vec![1, 2, 3, 5, 8, 13, 21],
|
||||
);
|
||||
let actual = "0.1.1.2.3.5.8.13.21".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_example_2() {
|
||||
let expected = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::JointIsoItuT,
|
||||
39,
|
||||
vec![42, 2501, 65535, 2147483647, 1235, 2352],
|
||||
);
|
||||
let actual = "2.39.42.2501.65535.2147483647.1235.2352".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn illegal_oid_root() {
|
||||
let expected = Err(ObjectIdentifierError::IllegalRootNode);
|
||||
for i in 3..=core::u8::MAX {
|
||||
let actual = ObjectIdentifierRoot::try_from(i);
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn illegal_first_node_too_large() {
|
||||
let expected = Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
for i in 40..=core::u8::MAX {
|
||||
let string_val = format!("{}.2.3.4", i);
|
||||
let mut nodes_iter = string_val.split(".");
|
||||
let actual = parse_string_first_node(nodes_iter.next().unwrap());
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn illegal_first_node_empty() {
|
||||
let expected = Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
let string_val = String::new();
|
||||
let mut nodes_iter = string_val.split(".");
|
||||
let actual = parse_string_first_node(nodes_iter.next().unwrap());
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn illegal_first_node_large() {
|
||||
let expected = Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
let string_val = String::from("40");
|
||||
let mut nodes_iter = string_val.split(".");
|
||||
let actual = parse_string_first_node(nodes_iter.next().unwrap());
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_crap() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalRootNode);
|
||||
let actual = "wtf".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_empty() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalRootNode);
|
||||
let actual = String::new().try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_empty() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalRootNode);
|
||||
let actual = vec![].try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_binary_example_over_u128() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalChildNodeValue);
|
||||
let actual = vec![
|
||||
0x00, 0x89, 0x97, 0xBF, 0xA3, 0xB8, 0xE8, 0xB3, 0xE6, 0xFB, 0xF2, 0xEA, 0xC3, 0xCA,
|
||||
0xF2, 0xBF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F,
|
||||
]
|
||||
.try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
#[test]
|
||||
fn parse_string_root_node_3plus() {
|
||||
for i in 3..=core::u8::MAX {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalRootNode);
|
||||
let actual = format!("{}", i).try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_example_over_u128() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalChildNodeValue);
|
||||
let actual = "1.1.349239782398732987223423423423423423423423423423434982342342342342342342324523453452345234523452345234523452345234537234987234".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_string_example_first_node_over_39() {
|
||||
let expected: Result<ObjectIdentifier, ObjectIdentifierError> =
|
||||
Err(ObjectIdentifierError::IllegalFirstChildNode);
|
||||
let actual = "1.40.1.2.3".try_into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_to_string() {
|
||||
let expected = String::from("1.2.3.4");
|
||||
let actual: String = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
2,
|
||||
vec![3, 4],
|
||||
).unwrap()
|
||||
.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn encode_to_bytes() {
|
||||
let expected = vec![0x2A, 0x03, 0x04];
|
||||
let actual: Vec<u8> = ObjectIdentifier::build(
|
||||
ObjectIdentifierRoot::Iso,
|
||||
2,
|
||||
vec![3, 4],
|
||||
).unwrap()
|
||||
.into();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
}
|
505
bffhd/permissions.rs
Normal file
@ -0,0 +1,505 @@
|
||||
//! Access control logic
|
||||
//!
|
||||
|
||||
use std::fmt;
|
||||
use std::cmp::Ordering;
|
||||
use std::convert::{TryFrom, Into};
|
||||
|
||||
/// A "Role" from the Authorization perspective
|
||||
///
|
||||
/// You can think of a role as a bundle of permissions relating to other roles. In most cases a
/// role represents a real-world qualification or apprenticeship, i.e. the training that allows a
/// person to use a machine safely.
/// Roles are assigned permissions which in most cases evaluate to granting a person the right to
/// use certain (potentially) dangerous machines.
/// This indirection makes administration easier: instead of maintaining permissions on each user
/// directly, a user is given a role after having been educated on the safety of a machine. If a
/// similar enough machine is later put to use, the administrator can simply add the permission
/// for that machine to the existing role instead of manually assigning it to every user.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Role {
|
||||
// If a role doesn't define parents, default to an empty Vec.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
/// A Role can have parents, inheriting all permissions
|
||||
///
|
||||
/// This makes situations where different levels of access are required easier: Each higher
|
||||
/// level of access sets the lower levels of access as parent, inheriting their permission; if
|
||||
/// you are allowed to manage a machine you are then also allowed to use it and so on
|
||||
parents: Vec<RoleIdentifier>,
|
||||
|
||||
// If a role doesn't define permissions, default to an empty Vec.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
permissions: Vec<PermRule>,
|
||||
}
|
||||
|
||||
impl Role {
|
||||
pub fn new(parents: Vec<RoleIdentifier>, permissions: Vec<PermRule>) -> Self {
|
||||
Self { parents, permissions }
|
||||
}
|
||||
}
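// A construction sketch; the role name, source and permission string below are made
// up for illustration. A "manage" role inherits everything the plain "use" role
// grants and adds a subtree rule on top of it:
//
// let use_laser = RoleIdentifier::new("use_laser", "local");
// let manage_laser = Role::new(
//     vec![use_laser],
//     vec![PermRule::Subtree(PermissionBuf::from_string_unchecked(
//         "bffh.machine.laser.manage".to_string(),
//     ))],
// );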
|
||||
|
||||
impl fmt::Display for Role {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "parents:")?;
|
||||
if self.parents.is_empty() {
|
||||
writeln!(f, " []")?;
|
||||
} else {
|
||||
writeln!(f, "")?;
|
||||
for p in self.parents.iter() {
|
||||
writeln!(f, " - {}", p)?;
|
||||
}
|
||||
}
|
||||
write!(f, "permissions:")?;
|
||||
if self.permissions.is_empty() {
|
||||
writeln!(f, " []")?;
|
||||
} else {
|
||||
writeln!(f, "")?;
|
||||
for p in self.permissions.iter() {
|
||||
writeln!(f, " - {}", p)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
type SourceID = String;
|
||||
|
||||
fn split_once(s: &str, split: char) -> Option<(&str, &str)> {
|
||||
s
|
||||
.find(split)
|
||||
.map(|idx| (&s[..idx], &s[(idx+1)..]))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
||||
#[serde(try_from = "String")]
|
||||
#[serde(into = "String")]
|
||||
/// Universal (relative) id of a role
|
||||
pub struct RoleIdentifier {
|
||||
/// Locally unique name for the role. No other role at this instance no matter the source
|
||||
/// may have the same name
|
||||
name: String,
|
||||
/// Role Source, i.e. the database the role comes from
|
||||
source: SourceID,
|
||||
}
|
||||
|
||||
impl RoleIdentifier {
|
||||
pub fn new(name: &str, source: &str) -> Self {
|
||||
Self { name: name.to_string(), source: source.to_string() }
|
||||
}
|
||||
pub fn from_strings(name: String, source: String) -> Self {
|
||||
Self { name, source }
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for RoleIdentifier {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.source != "" {
|
||||
write!(f, "{}/{}", self.name, self.source)
|
||||
} else {
|
||||
write!(f, "{}", self.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for RoleIdentifier {
|
||||
type Err = RoleFromStrError;
|
||||
|
||||
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||
if let Some((name, source)) = split_once(s, '/') {
|
||||
Ok(RoleIdentifier { name: name.to_string(), source: source.to_string() })
|
||||
} else {
|
||||
Ok(RoleIdentifier { name: s.to_string(), source: String::new() })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for RoleIdentifier {
|
||||
type Error = RoleFromStrError;
|
||||
|
||||
fn try_from(s: String) -> std::result::Result<Self, Self::Error> {
|
||||
<RoleIdentifier as std::str::FromStr>::from_str(&s)
|
||||
}
|
||||
}
|
||||
impl Into<String> for RoleIdentifier {
|
||||
fn into(self) -> String {
|
||||
format!("{}", self)
|
||||
}
|
||||
}
|
||||
|
||||
impl RoleIdentifier {
|
||||
pub fn local_from_str(source: String, name: String) -> Self {
|
||||
RoleIdentifier { name, source }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum RoleFromStrError {
|
||||
/// No '@' or '%' found. That's strange, huh?
|
||||
Invalid
|
||||
}
|
||||
|
||||
impl fmt::Display for RoleFromStrError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
RoleFromStrError::Invalid
|
||||
=> write!(f, "Role names are of the form 'name%source' or 'name@realm'."),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_sep_char(c: char) -> bool {
|
||||
c == '.'
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
|
||||
/// A set of privileges to a thing
|
||||
pub struct PrivilegesBuf {
|
||||
/// Which permission is required to know about the existence of this thing
|
||||
pub disclose: PermissionBuf,
|
||||
/// Which permission is required to read this thing
|
||||
pub read: PermissionBuf,
|
||||
/// Which permission is required to write parts of this thing
|
||||
pub write: PermissionBuf,
|
||||
/// Which permission is required to manage all parts of this thing
|
||||
pub manage: PermissionBuf
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
||||
#[repr(transparent)]
|
||||
#[serde(transparent)]
|
||||
/// An owned permission string
|
||||
///
|
||||
/// This is under the hood just a fancy std::String.
|
||||
// TODO: What is the possible fallout from homograph attacks?
|
||||
// i.e. "bffh.perm" is not the same as "bffհ.реrm" (Armenian 'հ':Հ and Cyrillic 'е':Е)
|
||||
// See also https://util.unicode.org/UnicodeJsps/confusables.jsp
|
||||
pub struct PermissionBuf {
|
||||
inner: String,
|
||||
}
|
||||
impl PermissionBuf {
|
||||
#[inline(always)]
|
||||
/// Allocate an empty `PermissionBuf`
|
||||
pub fn new() -> Self {
|
||||
PermissionBuf { inner: String::new() }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
/// Allocate a `PermissionBuf`, passing the given capacity on to the internal [`String`]
|
||||
pub fn with_capacity(cap: usize) -> Self {
|
||||
PermissionBuf { inner: String::with_capacity(cap) }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_permission(&self) -> &Permission {
|
||||
self.as_ref()
|
||||
}
|
||||
|
||||
pub fn push<P: AsRef<Permission>>(&mut self, perm: P) {
|
||||
self._push(perm.as_ref())
|
||||
}
|
||||
|
||||
pub fn _push(&mut self, perm: &Permission) {
|
||||
// we always need a separator unless the last char already is one or the string is empty
|
||||
let need_sep = self.inner.chars().rev().next().map(|c| !is_sep_char(c)).unwrap_or(false);
|
||||
if need_sep {
|
||||
self.inner.push('.')
|
||||
}
|
||||
self.inner.push_str(perm.as_str())
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub const fn from_string_unchecked(inner: String) -> Self {
|
||||
Self { inner }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn from_perm(perm: &Permission) -> Self {
|
||||
Self { inner: perm.as_str().to_string() }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn into_string(self) -> String {
|
||||
self.inner
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.inner.is_empty()
|
||||
}
|
||||
}
|
||||
impl AsRef<String> for PermissionBuf {
|
||||
#[inline(always)]
|
||||
fn as_ref(&self) -> &String {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
impl AsRef<str> for PermissionBuf {
|
||||
#[inline(always)]
|
||||
fn as_ref(&self) -> &str {
|
||||
self.inner.as_str()
|
||||
}
|
||||
}
|
||||
impl AsRef<Permission> for PermissionBuf {
|
||||
#[inline]
|
||||
fn as_ref(&self) -> &Permission {
|
||||
Permission::new(self)
|
||||
}
|
||||
}
|
||||
impl PartialOrd for PermissionBuf {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
let a: &Permission = self.as_ref();
|
||||
a.partial_cmp(other.as_ref())
|
||||
}
|
||||
}
|
||||
impl fmt::Display for PermissionBuf {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.inner.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
#[repr(transparent)]
|
||||
/// A borrowed permission string
|
||||
///
|
||||
/// Permissions have total equality and partial ordering.
|
||||
/// Specifically permissions on the same path in a tree can be compared for specificity.
|
||||
/// This means that ```(bffh.perm) > (bffh.perm.sub) == true```
|
||||
/// but ```(bffh.perm) > (unrelated.but.more.specific.perm) == false```.
|
||||
/// This allows to check if PermRule a grants Perm b by checking `a > b`.
|
||||
pub struct Permission(str);
|
||||
impl Permission {
|
||||
#[inline(always)]
|
||||
// We can't make this `const` just yet because `str` is always a fat pointer meaning we can't
|
||||
// just const cast it, and `CoerceUnsized` and friends are currently unstable.
|
||||
pub fn new<S: AsRef<str> + ?Sized>(s: &S) -> &Permission {
|
||||
// Safe because s is a valid reference
|
||||
unsafe { &*(s.as_ref() as *const str as *const Permission) }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn iter(&self) -> std::str::Split<char> {
|
||||
self.0.split('.')
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Permission {
|
||||
fn partial_cmp(&self, other: &Permission) -> Option<Ordering> {
|
||||
let mut i = self.iter();
|
||||
let mut j = other.iter();
|
||||
let (mut l, mut r);
|
||||
while {
|
||||
l = i.next();
|
||||
r = j.next();
|
||||
|
||||
l.is_some() && r.is_some()
|
||||
} {
|
||||
if l.unwrap() != r.unwrap() {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
match (l,r) {
|
||||
(None, None) => Some(Ordering::Equal),
|
||||
(Some(_), None) => Some(Ordering::Less),
|
||||
(None, Some(_)) => Some(Ordering::Greater),
|
||||
(Some(_), Some(_)) => unreachable!("Broken contract in Permission::partial_cmp: sides \
|
||||
should never be both Some!"),
|
||||
}
|
||||
}
|
||||
}
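// Sketch of the ordering semantics documented on `Permission` (the permission
// strings are illustrative): a more general permission on the same path compares
// as greater, while unrelated paths don't compare at all.
//
// assert!(Permission::new("bffh.perm") > Permission::new("bffh.perm.sub"));
// assert_eq!(
//     Permission::new("bffh.perm").partial_cmp(Permission::new("unrelated.perm")),
//     None,
// );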
|
||||
|
||||
impl AsRef<Permission> for Permission {
|
||||
#[inline]
|
||||
fn as_ref(&self) -> &Permission {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
||||
#[serde(try_from = "String")]
|
||||
#[serde(into = "String")]
|
||||
pub enum PermRule {
|
||||
/// The permission is precise,
|
||||
///
|
||||
/// i.e. `Base("bffh.perm")` grants bffh.perm but does not grant permission for bffh.perm.sub
|
||||
Base(PermissionBuf),
|
||||
/// The permission is for the children of the node
|
||||
///
|
||||
/// i.e. `Children("bffh.perm")` grants bffh.perm.sub, bffh.perm.sub.two *BUT NOT* bffh.perm
|
||||
/// itself.
|
||||
Children(PermissionBuf),
|
||||
/// The permission is for the subtree marked by the node
|
||||
///
|
||||
/// i.e. `Subtree("bffh.perm")` grants bffh.perm.sub, bffh.perm.sub.two and also bffh.perm
|
||||
/// itself.
|
||||
Subtree(PermissionBuf),
|
||||
// This lacks what LDAP calls "ONELEVEL": The ability to grant the exact children but not several
|
||||
// levels deep, i.e. `Onelevel("bffh.perm")` grants bffh.perm.sub *BUT NOT* bffh.perm.sub.two or
|
||||
// bffh.perm itself.
|
||||
// I can't think of a reason to use that so I'm skipping it for now.
|
||||
}
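// Round-trip sketch tying the variants to their textual form (the permission string
// is illustrative; see the `Display` and `TryFrom<String>` impls below):
//
// let rule = PermRule::try_from("bffh.perm.*".to_string()).unwrap();
// assert_eq!(
//     rule,
//     PermRule::Subtree(PermissionBuf::from_string_unchecked("bffh.perm".to_string()))
// );
// assert_eq!(Into::<String>::into(rule), "bffh.perm.*".to_string());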
|
||||
|
||||
impl PermRule {
|
||||
// Does this rule match that permission
|
||||
pub fn match_perm<P: AsRef<Permission> + ?Sized>(&self, perm: &P) -> bool {
|
||||
match self {
|
||||
PermRule::Base(ref base) => base.as_permission() == perm.as_ref(),
|
||||
PermRule::Children(ref parent) => parent.as_permission() > perm.as_ref() ,
|
||||
PermRule::Subtree(ref parent) => parent.as_permission() >= perm.as_ref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PermRule {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
PermRule::Base(perm)
|
||||
=> write!(f, "{}", perm),
|
||||
PermRule::Children(parent)
|
||||
=> write!(f,"{}.+", parent),
|
||||
PermRule::Subtree(parent)
|
||||
=> write!(f,"{}.*", parent),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<String> for PermRule {
|
||||
fn into(self) -> String {
|
||||
match self {
|
||||
PermRule::Base(perm) => perm.into_string(),
|
||||
PermRule::Children(mut perm) => {
|
||||
perm.push(Permission::new("+"));
|
||||
perm.into_string()
|
||||
},
|
||||
PermRule::Subtree(mut perm) => {
|
||||
perm.push(Permission::new("*"));
|
||||
perm.into_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for PermRule {
|
||||
type Error = &'static str;
|
||||
|
||||
fn try_from(mut input: String) -> std::result::Result<Self, Self::Error> {
|
||||
// Check out specifically the last two chars
|
||||
let len = input.len();
|
||||
if len <= 2 {
|
||||
Err("Input string for PermRule is too short")
|
||||
} else {
|
||||
match &input[len-2..len] {
|
||||
".+" => {
|
||||
input.truncate(len-2);
|
||||
Ok(PermRule::Children(PermissionBuf::from_string_unchecked(input)))
|
||||
},
|
||||
".*" => {
|
||||
input.truncate(len-2);
|
||||
Ok(PermRule::Subtree(PermissionBuf::from_string_unchecked(input)))
|
||||
},
|
||||
_ => Ok(PermRule::Base(PermissionBuf::from_string_unchecked(input))),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn permission_ord_test() {
|
||||
assert!(PermissionBuf::from_string_unchecked("bffh.perm".to_string())
|
||||
> PermissionBuf::from_string_unchecked("bffh.perm.sub".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn permission_simple_check_test() {
|
||||
let perm = PermissionBuf::from_string_unchecked("test.perm".to_string());
|
||||
let rule = PermRule::Base(perm.clone());
|
||||
|
||||
assert!(rule.match_perm(&perm));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn permission_children_checks_only_children() {
|
||||
let perm = PermissionBuf::from_string_unchecked("test.perm".to_string());
|
||||
let rule = PermRule::Children(perm.clone());
|
||||
|
||||
assert_eq!(rule.match_perm(&perm), false);
|
||||
|
||||
let perm2 = PermissionBuf::from_string_unchecked("test.perm.child".to_string());
|
||||
let perm3 = PermissionBuf::from_string_unchecked("test.perm.child.deeper".to_string());
|
||||
assert!(rule.match_perm(&perm2));
|
||||
assert!(rule.match_perm(&perm3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn permission_subtree_checks_base() {
|
||||
let perm = PermissionBuf::from_string_unchecked("test.perm".to_string());
|
||||
let rule = PermRule::Subtree(perm.clone());
|
||||
|
||||
assert!(rule.match_perm(&perm));
|
||||
|
||||
let perm2 = PermissionBuf::from_string_unchecked("test.perm.child".to_string());
|
||||
let perm3 = PermissionBuf::from_string_unchecked("test.perm.child.deeper".to_string());
|
||||
|
||||
assert!(rule.match_perm(&perm2));
|
||||
assert!(rule.match_perm(&perm3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn format_and_read_compatible() {
|
||||
use std::convert::TryInto;
|
||||
|
||||
let testdata = vec![
|
||||
("testrole", "testsource"),
|
||||
("", "norole"),
|
||||
("nosource", "")
|
||||
].into_iter().map(|(n,s)| (n.to_string(), s.to_string()));
|
||||
|
||||
for (name, source) in testdata {
|
||||
let role = RoleIdentifier { name, source };
|
||||
|
||||
let fmt_string = format!("{}", &role);
|
||||
|
||||
println!("{:?} is formatted: {}", &role, &fmt_string);
|
||||
|
||||
let parsed: RoleIdentifier = fmt_string.try_into().unwrap();
|
||||
|
||||
println!("Which parses into {:?}", &parsed);
|
||||
|
||||
assert_eq!(role, parsed);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn rules_from_string_test() {
|
||||
assert_eq!(
|
||||
PermRule::Base(PermissionBuf::from_string_unchecked("bffh.perm".to_string())),
|
||||
PermRule::try_from("bffh.perm".to_string()).unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
PermRule::Children(PermissionBuf::from_string_unchecked("bffh.perm".to_string())),
|
||||
PermRule::try_from("bffh.perm.+".to_string()).unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
PermRule::Subtree(PermissionBuf::from_string_unchecked("bffh.perm".to_string())),
|
||||
PermRule::try_from("bffh.perm.*".to_string()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rules_from_string_edgecases_test() {
|
||||
assert!(PermRule::try_from("*".to_string()).is_err());
|
||||
assert!(PermRule::try_from("+".to_string()).is_err());
|
||||
}
|
||||
}
|
141
bffhd/resource.rs
Normal file
@ -0,0 +1,141 @@
|
||||
use std::fmt::Debug;
|
||||
use async_trait::async_trait;
|
||||
|
||||
use futures_signals::signal::Mutable;
|
||||
use async_oneshot::Sender;
|
||||
use async_channel::Receiver;
|
||||
|
||||
use crate::state::State;
|
||||
use crate::db::{
|
||||
state::StateAccessor,
|
||||
};
|
||||
|
||||
/// A resource in BFFH has to contain several different parts;
|
||||
/// - Currently set state
|
||||
/// - Execution state of attached actors (⇒ BFFH's job)
|
||||
/// - Output of internal logic of a resource
|
||||
/// ⇒ Resource logic gets read access to set state and write access to output state.
|
||||
/// ⇒ state `update` happens via resource logic. This logic should do access control. If the update
|
||||
/// succeeds then BFFH stores those input parameters ("set" state) and results / output state.
|
||||
/// Storing input parameters is relevant so that BFFH can know that an "update" is a no-op
|
||||
/// without having to run the module code.
|
||||
/// ⇒ in fact actors only really care about the output state, and shouldn't (need to) see "set"
|
||||
/// state.
|
||||
/// ⇒ example reserving:
|
||||
/// - Claimant sends 'update' message with a new state
|
||||
/// - Doesn't set the state until `update` has returned Ok.
|
||||
/// - This runs the `update` function with that new state and the claimants user context returning
|
||||
/// either an Ok or an Error.
|
||||
/// - Error is returned to Claimant to show user, stop.
|
||||
/// - On ok:
|
||||
/// - Commit new "set" state, storing it and making it visible to all other claimants
|
||||
/// - Commit new output state, storing it and notifying all connected actors / Notify
|
||||
/// ⇒ BFFHs job in this whole ordeal is:
|
||||
/// - Message passing primitives so that update message are queued
|
||||
/// - As reliable as possible storage system for input and output state
|
||||
/// - Again message passing so that updates are broadcasted to all Notify and Actors.
|
||||
/// ⇒ Resource module's job is:
|
||||
/// - Validating updates semantically i.e. are the types correct
|
||||
/// - Check authorization of updates i.e. is this user allowed to do that
|
||||
#[async_trait]
|
||||
pub trait Resource: Debug {
|
||||
/// Run whatever internal logic this resource has for the given State update, and return the
|
||||
/// new output state that this update produces.
|
||||
async fn on_update(&mut self, input: &State) -> Result<State, Error>;
|
||||
async fn shutdown(&mut self);
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Passthrough;
|
||||
#[async_trait]
|
||||
impl Resource for Passthrough {
|
||||
async fn on_update(&mut self, input: &State) -> Result<State, Error> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
|
||||
async fn shutdown(&mut self) {}
|
||||
}
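// A rough sketch of a resource that performs the access-control step described in
// the module docs above (the type and its `locked` field are made up for
// illustration): when locked it refuses every update instead of passing it through.
//
// #[derive(Debug)]
// struct GuardedSwitch { locked: bool }
//
// #[async_trait]
// impl Resource for GuardedSwitch {
//     async fn on_update(&mut self, input: &State) -> Result<State, Error> {
//         if self.locked {
//             Err(Error::Denied)
//         } else {
//             Ok(input.clone())
//         }
//     }
//
//     async fn shutdown(&mut self) {}
// }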
|
||||
|
||||
/// Error type a resource implementation can produce
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
Internal(Box<dyn std::error::Error>),
|
||||
Denied,
|
||||
}
|
||||
|
||||
// TODO: more message context
|
||||
pub struct Update {
|
||||
pub state: State,
|
||||
pub errchan: Sender<Error>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ResourceDriver {
|
||||
// resource logic
|
||||
res: Box<dyn Resource>,
|
||||
|
||||
// input
|
||||
rx: Receiver<Update>,
|
||||
|
||||
// output
|
||||
db: StateAccessor,
|
||||
|
||||
signal: Mutable<State>,
|
||||
}
|
||||
|
||||
impl ResourceDriver {
|
||||
pub async fn drive_to_end(&mut self) {
|
||||
while let Ok(update) = self.rx.recv().await {
|
||||
let state = update.state;
|
||||
let mut errchan = update.errchan;
|
||||
|
||||
match self.res.on_update(&state).await {
|
||||
Ok(outstate) => {
|
||||
// FIXME: Send any error here to some global error collector. A failed write to
|
||||
// the DB is not necessarily fatal, but it means that BFFH is now in an
|
||||
// inconsistent state until a future update succeeds with writing to the DB.
|
||||
// Not applying the new state isn't correct either since we don't know what the
|
||||
// internal logic of the resource has done to make this happen.
|
||||
// Another half right solution is to unwrap and recreate everything.
|
||||
// "Best" solution would be to tell the resource to roll back its internal
|
||||
// changes on a fatal failure and then notify the Claimant, while simply trying
|
||||
// again for temporary failures.
|
||||
let _ = self.db.set(&state, &outstate);
|
||||
self.signal.set(outstate);
|
||||
},
|
||||
Err(e) => {
|
||||
let _ = errchan.send(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
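// Wiring sketch for the driver loop above. How drivers are actually constructed and
// spawned lives elsewhere; the channel setup below only illustrates the `Update`
// message flow (`new_state` and the driver fields are placeholders).
//
// let (tx, rx) = async_channel::unbounded::<Update>();
// // ... build a ResourceDriver { res, rx, db, signal } and spawn drive_to_end() ...
// let (err_tx, err_rx) = async_oneshot::oneshot::<Error>();
// tx.send(Update { state: new_state, errchan: err_tx }).await?;
// // a rejection by the resource shows up on err_rx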
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::pin::Pin;
|
||||
use std::task::Poll;
|
||||
use std::future::Future;
|
||||
use super::*;
|
||||
|
||||
#[futures_test::test]
|
||||
async fn test_passthrough_is_id() {
|
||||
let inp = crate::state::tests::gen_random();
|
||||
|
||||
let mut res = Passthrough;
|
||||
let out = res.on_update(&inp).await.unwrap();
|
||||
assert_eq!(inp, out);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_passthrough_is_always_ready() {
|
||||
let inp = State::build().finish();
|
||||
|
||||
let mut res = Passthrough;
|
||||
let mut cx = futures_test::task::panic_context();
|
||||
if let Poll::Ready(_) = Pin::new(&mut res.on_update(&inp)).poll(&mut cx) {
|
||||
return;
|
||||
}
|
||||
panic!("Passthrough returned Poll::Pending")
|
||||
}
|
||||
}
|
41
bffhd/schema.rs
Normal file
@ -0,0 +1,41 @@
|
||||
pub use capnpc::schema_capnp;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod auth_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/auth_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod main_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/main_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod utils_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/utils_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod resource_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/resource_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod resources_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/resources_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod role_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/role_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod user_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/user_capnp.rs"));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub mod users_capnp {
|
||||
include!(concat!(env!("OUT_DIR"), "/schema/users_capnp.rs"));
|
||||
}
|
246
bffhd/state.rs
Normal file
@ -0,0 +1,246 @@
|
||||
use std::{
|
||||
fmt,
|
||||
|
||||
collections::{
|
||||
hash_map::DefaultHasher
|
||||
},
|
||||
hash::{
|
||||
Hash,
|
||||
Hasher
|
||||
},
|
||||
};
|
||||
|
||||
use rkyv::{
|
||||
Archive,
|
||||
Archived,
|
||||
|
||||
Serialize,
|
||||
Deserialize,
|
||||
|
||||
out_field,
|
||||
};
|
||||
|
||||
pub mod value;
|
||||
use value::{SerializeValue, RegisteredImpl};
|
||||
use crate::state::value::{TypeOid, DynVal, DynOwnedVal, };
|
||||
use crate::oid::ObjectIdentifier;
|
||||
use serde::ser::SerializeMap;
|
||||
use std::ops::Deref;
|
||||
use std::fmt::Formatter;
|
||||
use serde::Deserializer;
|
||||
use serde::de::{Error, MapAccess};
|
||||
|
||||
#[derive(serde::Serialize, serde::Deserialize)]
|
||||
#[derive(Archive, Serialize, Deserialize)]
|
||||
#[derive(Clone, PartialEq)]
|
||||
#[archive_attr(derive(Debug))]
|
||||
/// State object of a resource
|
||||
///
|
||||
/// This object serves three functions:
|
||||
/// 1. it is constructed by modification via Claims or via internal resource logic
|
||||
/// 2. it is serializable and storable in the database
|
||||
/// 3. it is sendable and forwarded to all Actors and Notifys
|
||||
pub struct State {
|
||||
pub hash: u64,
|
||||
pub inner: Vec<OwnedEntry>,
|
||||
}
|
||||
|
||||
impl State {
|
||||
pub fn build() -> StateBuilder {
|
||||
StateBuilder::new()
|
||||
}
|
||||
pub fn hash(&self) -> u64 {
|
||||
self.hash
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Archived<State>> for State {
|
||||
fn eq(&self, other: &Archived<Self>) -> bool {
|
||||
self.hash == other.hash
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for State {}
|
||||
|
||||
impl fmt::Debug for State {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let mut sf = f.debug_struct("State");
|
||||
for OwnedEntry { oid, val } in self.inner.iter() {
|
||||
let k: String = oid.into();
|
||||
sf.field(k.as_ref(), val);
|
||||
}
|
||||
sf.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct StateBuilder {
|
||||
hasher: DefaultHasher,
|
||||
inner: Vec<OwnedEntry>
|
||||
}
|
||||
|
||||
impl StateBuilder {
|
||||
pub fn new() -> Self {
|
||||
let hasher = DefaultHasher::new();
|
||||
Self { inner: Vec::new(), hasher }
|
||||
}
|
||||
|
||||
pub fn finish(self) -> State {
|
||||
State {
|
||||
hash: self.hasher.finish(),
|
||||
inner: self.inner,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add key-value pair to the State being built.
|
||||
///
|
||||
/// We have to use this split system here because type erasure prevents us from limiting values
|
||||
/// to `Hash`. Specifically, you can't have a trait object of `Hash` because `Hash` depends on
|
||||
/// `Self`. In this function however the compiler still knows the exact type of `V` and can
|
||||
/// statically call its `hash` method.
|
||||
pub fn add<V>(mut self, oid: ObjectIdentifier, val: Box<V>) -> Self
|
||||
where V: SerializeValue + Hash + Archive,
|
||||
Archived<V>: TypeOid + RegisteredImpl,
|
||||
{
|
||||
// Hash before creating the StateEntry struct which removes the type information
|
||||
oid.hash(&mut self.hasher);
|
||||
val.hash(&mut self.hasher);
|
||||
self.inner.push(OwnedEntry { oid, val });
|
||||
|
||||
self
|
||||
}
|
||||
}
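// Builder usage sketch mirroring the tests at the bottom of this file (the OID
// constants are defined elsewhere and only used for illustration here):
//
// let state = State::build()
//     .add(OID_POWERED.clone(), Box::new(false))
//     .add(OID_INTENSITY.clone(), Box::new(1024))
//     .finish();
// // two States built from the same entries compare equal by hash,
// // see `test_equal_state_is_eq` below.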
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Entry<'a> {
|
||||
pub oid: &'a ObjectIdentifier,
|
||||
pub val: &'a dyn SerializeValue,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Archive, Serialize, Deserialize)]
|
||||
#[archive_attr(derive(Debug))]
|
||||
pub struct OwnedEntry {
|
||||
pub oid: ObjectIdentifier,
|
||||
pub val: Box<dyn SerializeValue>,
|
||||
}
|
||||
|
||||
impl PartialEq for OwnedEntry {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.oid == other.oid && self.val.dyn_eq(other.val.as_value())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> serde::Serialize for Entry<'a> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: serde::Serializer
|
||||
{
|
||||
let mut ser = serializer.serialize_map(Some(1))?;
|
||||
ser.serialize_entry(&self.oid, &DynVal(self.val))?;
|
||||
ser.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for OwnedEntry {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: serde::Serializer
|
||||
{
|
||||
let mut ser = serializer.serialize_map(Some(1))?;
|
||||
ser.serialize_entry(&self.oid, &DynVal(self.val.deref()))?;
|
||||
ser.end()
|
||||
}
|
||||
}
|
||||
impl<'de> serde::Deserialize<'de> for OwnedEntry {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where D: Deserializer<'de>
|
||||
{
|
||||
deserializer.deserialize_map(OwnedEntryVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
struct OwnedEntryVisitor;
|
||||
impl<'de> serde::de::Visitor<'de> for OwnedEntryVisitor {
|
||||
type Value = OwnedEntry;
|
||||
|
||||
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
|
||||
write!(formatter, "a one-entry map from an OID to a value object")
|
||||
}
|
||||
|
||||
fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||
{
|
||||
let oid: ObjectIdentifier = map.next_key()?
|
||||
.ok_or(A::Error::missing_field("oid"))?;
|
||||
let val: DynOwnedVal = map.next_value()?;
|
||||
Ok(OwnedEntry { oid, val: val.0 })
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod tests {
|
||||
use super::*;
|
||||
use crate::state::value::*;
|
||||
|
||||
pub(crate) fn gen_random() -> State {
|
||||
let amt: u8 = rand::random::<u8>() % 20;
|
||||
|
||||
let mut sb = State::build();
|
||||
for _ in 0..amt {
|
||||
let oid = crate::oid::tests::gen_random();
|
||||
sb = match rand::random::<u32>()%12 {
|
||||
0 => sb.add(oid, Box::new(rand::random::<bool>())),
|
||||
1 => sb.add(oid, Box::new(rand::random::<u8>())),
|
||||
2 => sb.add(oid, Box::new(rand::random::<u16>())),
|
||||
3 => sb.add(oid, Box::new(rand::random::<u32>())),
|
||||
4 => sb.add(oid, Box::new(rand::random::<u64>())),
|
||||
5 => sb.add(oid, Box::new(rand::random::<u128>())),
|
||||
6 => sb.add(oid, Box::new(rand::random::<i8>())),
|
||||
7 => sb.add(oid, Box::new(rand::random::<i16>())),
|
||||
8 => sb.add(oid, Box::new(rand::random::<i32>())),
|
||||
9 => sb.add(oid, Box::new(rand::random::<i64>())),
|
||||
10 => sb.add(oid, Box::new(rand::random::<i128>())),
|
||||
11 => sb.add(oid, Box::new(rand::random::<Vec3u8>())),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
sb.finish()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_equal_state_is_eq() {
|
||||
let stateA = State::build()
|
||||
.add(OID_POWERED.clone(), Box::new(false))
|
||||
.add(OID_INTENSITY.clone(), Box::new(1024))
|
||||
.finish();
|
||||
|
||||
let stateB = State::build()
|
||||
.add(OID_POWERED.clone(), Box::new(false))
|
||||
.add(OID_INTENSITY.clone(), Box::new(1024))
|
||||
.finish();
|
||||
|
||||
assert_eq!(stateA, stateB);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unequal_state_is_ne() {
|
||||
let stateA = State::build()
|
||||
.add(OID_POWERED.clone(), Box::new(true))
|
||||
.add(OID_INTENSITY.clone(), Box::new(512))
|
||||
.finish();
|
||||
|
||||
let stateB = State::build()
|
||||
.add(OID_POWERED.clone(), Box::new(false))
|
||||
.add(OID_INTENSITY.clone(), Box::new(1024))
|
||||
.finish();
|
||||
|
||||
assert_ne!(stateA, stateB);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_state_is_clone() {
|
||||
let stateA = gen_random();
|
||||
|
||||
let stateB = stateA.clone();
|
||||
let stateC = stateB.clone();
|
||||
drop(stateA);
|
||||
|
||||
assert_eq!(stateC, stateB);
|
||||
}
|
||||
}
|
592
bffhd/state/value.rs
Normal file
@ -0,0 +1,592 @@
|
||||
use core::{
|
||||
ptr,
|
||||
};
|
||||
use std::{
|
||||
fmt,
|
||||
any::Any,
|
||||
hash::Hash,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use rkyv::{Archive, Archived, Serialize, Deserialize, out_field, Fallible, DeserializeUnsized, ArchivePointee, ArchiveUnsized, ArchivedMetadata, SerializeUnsized, };
|
||||
use rkyv_dyn::{DynSerializer, DynError, DynDeserializer};
|
||||
use rkyv_typename::TypeName;
|
||||
use ptr_meta::{DynMetadata, Pointee};
|
||||
|
||||
use inventory;
|
||||
use crate::oid::{ObjectIdentifier};
|
||||
use rkyv::ser::{Serializer, ScratchSpace};
|
||||
use std::collections::HashMap;
|
||||
use std::alloc::Layout;
|
||||
use serde::ser::SerializeMap;
|
||||
use std::fmt::Formatter;
|
||||
use serde::de::Error as SerdeError;
|
||||
use std::mem::MaybeUninit;
|
||||
|
||||
/// Adding a custom type to BFFH state management:
|
||||
///
|
||||
/// 1. Implement `serde`'s [`Serialize`](serde::Serialize) and [`Deserialize`](serde::Deserialize)
|
||||
/// - `derive()`d instances work just fine, but keep stability over releases in mind.
|
||||
/// 2. Implement rkyv's [`Serialize`](rkyv::Serialize).
|
||||
/// 3. Implement TypeOid on your Archived type (i.e. `<T as Archive>::Archived`)
|
||||
/// 4. Implement this trait (the blanket `impl<T> Value for T` below covers most types)
|
||||
pub trait Value: Any + fmt::Debug + erased_serde::Serialize + Sync {
|
||||
/// Initialize `&mut self` from `deserializer`
|
||||
///
|
||||
/// At the point this is called &mut self is of undefined value but guaranteed to be well
|
||||
/// aligned and non-null. Any read access into &mut self before all of &self is brought into
|
||||
/// a valid state is however undefined behaviour.
|
||||
/// To this end you *must* initialize `self` **completely**. Serde will do the right thing if
|
||||
/// you directly deserialize the type you're implementing `Value` for, but for manual
|
||||
/// implementations this is important to keep in mind.
|
||||
fn deserialize_init<'de>(&mut self, deserializer: &mut dyn erased_serde::Deserializer<'de>)
|
||||
-> Result<(), erased_serde::Error>;
|
||||
|
||||
/// Implement `PartialEq` dynamically.
|
||||
///
|
||||
/// This should return `true` iff the Value is of the same type and `self` == `other` for
|
||||
/// non-dynamic types would return `true`.
|
||||
/// It is safe to always return `false`.
|
||||
fn dyn_eq(&self, other: &dyn Value) -> bool;
|
||||
|
||||
fn as_value(&self) -> &dyn Value;
|
||||
fn as_any(&self) -> &dyn Any;
|
||||
}
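// Sketch of the four steps above for a hypothetical `Color` value type. The type,
// its fields and the `OID_COLOR` constant are made up; the `RegisteredImpl`
// registration step is not repeated here.
//
// #[derive(Clone, Debug, PartialEq, Hash,
//          serde::Serialize, serde::Deserialize,                    // step 1
//          rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]      // step 2
// pub struct Color { pub r: u8, pub g: u8, pub b: u8 }
//
// impl TypeOid for ArchivedColor {                                  // step 3
//     fn type_oid() -> &'static ObjectIdentifier { &OID_COLOR }
//     fn type_name() -> &'static str { "Color" }
//     fn type_desc() -> &'static str { "An RGB colour value" }
// }
//
// // Step 4 is covered by the blanket `impl<T> Value for T` further below as long
// // as `Color` satisfies its bounds.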
|
||||
erased_serde::serialize_trait_object!(Value);
|
||||
erased_serde::serialize_trait_object!(SerializeValue);
|
||||
erased_serde::serialize_trait_object!(DeserializeValue);
|
||||
|
||||
impl<T> Value for T
|
||||
where T: Any + fmt::Debug + PartialEq + Sync
|
||||
+ erased_serde::Serialize
|
||||
+ for<'de> serde::Deserialize<'de>
|
||||
{
|
||||
fn deserialize_init<'de>(&mut self, deserializer: &mut dyn erased_serde::Deserializer<'de>)
|
||||
-> Result<(), erased_serde::Error>
|
||||
{
|
||||
*self = erased_serde::deserialize(deserializer)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn dyn_eq(&self, other: &dyn Value) -> bool {
|
||||
other.as_any().downcast_ref().map_or(false, |other: &T| other == self)
|
||||
}
|
||||
|
||||
fn as_value(&self) -> &dyn Value {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for dyn Value {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.dyn_eq(other)
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(transparent)]
|
||||
pub(super) struct DynVal<'a>(pub &'a dyn SerializeValue);
|
||||
impl<'a> serde::Serialize for DynVal<'a> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where S: serde::Serializer
|
||||
{
|
||||
let mut ser = serializer.serialize_map(Some(1))?;
|
||||
let oid = self.0.archived_type_oid();
|
||||
ser.serialize_entry(oid, self.0)?;
|
||||
ser.end()
|
||||
}
|
||||
}
|
||||
#[repr(transparent)]
|
||||
pub(super) struct DynOwnedVal(pub Box<dyn SerializeValue>);
|
||||
impl<'de> serde::Deserialize<'de> for DynOwnedVal {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where D: serde::Deserializer<'de>
|
||||
{
|
||||
deserializer.deserialize_map(DynValVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
struct DynValVisitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for DynValVisitor {
|
||||
type Value = DynOwnedVal;
|
||||
|
||||
fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
|
||||
write!(formatter, "a one-entry map from an OID to a value object")
|
||||
}
|
||||
|
||||
fn visit_map<A: serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||
{
|
||||
// Bad magic code. Problem we have to solve: We only know how to parse whatever comes
|
||||
// after the OID after having looked at the OID. We have zero static type info available
|
||||
// during deserialization. So:
|
||||
|
||||
// Get OID first. That's easy, we know it's the key, we know how to read it.
|
||||
let oid: ObjectIdentifier = map.next_key()?
|
||||
.ok_or(A::Error::missing_field("oid"))?;
|
||||
|
||||
// Get the Value vtable for that OID. Or fail because we don't know that OID, either works.
|
||||
let valimpl = IMPL_REGISTRY.get(ImplId::from_type_oid(&oid))
|
||||
.ok_or(serde::de::Error::invalid_value(
|
||||
serde::de::Unexpected::Other("unknown oid"),
|
||||
&"oid an implementation was registered for",
|
||||
))?;
|
||||
|
||||
// Casting random usize you find on the side of the road as vtable on unchecked pointers.
|
||||
// What could possibly go wrong? >:D
|
||||
let valbox: MaybeUninit<Box<dyn SerializeValue>> = unsafe {
|
||||
// "recreate" vtable as fat ptr metadata (we literally just cast a `usize` but the
|
||||
// only way to put this usize into that spot is by having a valid vtable cast so it's
|
||||
// probably almost safe)
|
||||
let meta = valimpl.pointer_metadata();
|
||||
|
||||
// Don't bother checking here. The only way this could be bad is if the vtable above
|
||||
// is bad and in that case a segfault here would be *much better* than whatever is
|
||||
// going to happen afterwards.
|
||||
let layout = Layout::from_size_align_unchecked(meta.size_of(), meta.align_of());
|
||||
|
||||
// Hello yes I would like a Box the old fashioned way.
|
||||
// Oh, you're asking why we're allocating stuff here and never bother zeroing or
// validating it in any other way to check if this is sane?
|
||||
// Well...
|
||||
let ptr: *mut () = std::alloc::alloc(layout).cast::<()>();
|
||||
let b = Box::from_raw(ptr_meta::from_raw_parts_mut(
|
||||
ptr,
|
||||
meta));
|
||||
|
||||
// We make this a MaybeUninit so `Drop` is never called on the uninitialized value
|
||||
MaybeUninit::new(b)
|
||||
};
|
||||
// ... The only way we can make Value a trait object is by having it deserialize *into
// its own uninitialized representation*. Yeah don't worry, this isn't the worst part of
|
||||
// the game yet. >:D
|
||||
let seed = InitIntoSelf(valbox);
|
||||
let val = map.next_value_seed(seed)?;
|
||||
Ok(DynOwnedVal(val))
|
||||
}
|
||||
}
|
||||
|
||||
struct InitIntoSelf(MaybeUninit<Box<dyn SerializeValue>>);
|
||||
|
||||
impl<'de> serde::de::DeserializeSeed<'de> for InitIntoSelf {
|
||||
type Value = Box<dyn SerializeValue>;
|
||||
|
||||
fn deserialize<D>(mut self, deserializer: D) -> Result<Self::Value, D::Error>
|
||||
where D: serde::Deserializer<'de>
|
||||
{
|
||||
let mut deser = <dyn erased_serde::Deserializer>::erase(deserializer);
|
||||
|
||||
// Unsafe as hell but if we never read from this reference before initializing it's not
|
||||
// undefined behaviour.
|
||||
let selfptr = unsafe { &mut *self.0.as_mut_ptr() };
|
||||
|
||||
// Hey, better initialize late than never.
|
||||
selfptr.deserialize_init(&mut deser).map_err(|e|
|
||||
D::Error::custom(e))?;
|
||||
|
||||
// Assuming `deserialize_init` didn't error and did its job this is now safe.
|
||||
unsafe { Ok(self.0.assume_init()) }
|
||||
}
|
||||
}
|
||||
|
||||
pub trait TypeOid {
    fn type_oid() -> &'static ObjectIdentifier;
    fn type_name() -> &'static str;
    fn type_desc() -> &'static str;
}

impl<S: ScratchSpace + Serializer + ?Sized> SerializeUnsized<S> for dyn SerializeValue {
    fn serialize_unsized(&self, mut serializer: &mut S) -> Result<usize, S::Error> {
        self.serialize_dynoid(&mut serializer)
            .map_err(|e| *e.downcast::<S::Error>().unwrap())
    }

    fn serialize_metadata(&self, serializer: &mut S) -> Result<Self::MetadataResolver, S::Error> {
        let oid = self.archived_type_oid();
        oid.serialize(serializer)
    }
}


/// Serialize dynamic types by storing an OID alongside
pub trait SerializeDynOid {
    fn serialize_dynoid(&self, serializer: &mut dyn DynSerializer) -> Result<usize, DynError>;
    fn archived_type_oid(&self) -> &'static ObjectIdentifier;
}

impl<T> SerializeDynOid for T
    where T: for<'a> Serialize<dyn DynSerializer + 'a>,
          T::Archived: TypeOid,
{
    fn serialize_dynoid(&self, serializer: &mut dyn DynSerializer) -> Result<usize, DynError> {
        serializer.serialize_value(self)
    }

    fn archived_type_oid(&self) -> &'static ObjectIdentifier {
        Archived::<T>::type_oid()
    }
}

pub trait DeserializeDynOid {
    unsafe fn deserialize_dynoid(
        &self,
        deserializer: &mut dyn DynDeserializer,
        alloc: &mut dyn FnMut(Layout) -> *mut u8,
    ) -> Result<*mut (), DynError>;

    fn deserialize_dynoid_metadata(
        &self,
        deserializer: &mut dyn DynDeserializer,
    ) -> Result<<dyn SerializeValue as Pointee>::Metadata, DynError>;
}

#[ptr_meta::pointee]
pub trait SerializeValue: Value + SerializeDynOid {
    fn dyn_clone(&self) -> Box<dyn SerializeValue>;
}

impl<T: Archive + Value + SerializeDynOid + Clone> SerializeValue for T
    where T::Archived: RegisteredImpl
{
    fn dyn_clone(&self) -> Box<dyn SerializeValue> {
        Box::new(self.clone())
    }
}

impl PartialEq for dyn SerializeValue {
    fn eq(&self, other: &Self) -> bool {
        self.dyn_eq(other.as_value())
    }
}

impl Clone for Box<dyn SerializeValue> {
    fn clone(&self) -> Self {
        self.dyn_clone()
    }
}

#[ptr_meta::pointee]
pub trait DeserializeValue: Value + DeserializeDynOid {}
impl<T: Value + DeserializeDynOid> DeserializeValue for T {}
impl ArchivePointee for dyn DeserializeValue {
    type ArchivedMetadata = ArchivedValueMetadata;

    fn pointer_metadata(archived: &Self::ArchivedMetadata) -> <Self as Pointee>::Metadata {
        archived.pointer_metadata()
    }
}
impl<D: Fallible + ?Sized> DeserializeUnsized<dyn SerializeValue, D> for dyn DeserializeValue {
    unsafe fn deserialize_unsized(
        &self,
        mut deserializer: &mut D,
        mut alloc: impl FnMut(Layout) -> *mut u8,
    ) -> Result<*mut (), D::Error> {
        self.deserialize_dynoid(&mut deserializer, &mut alloc).map_err(|e| *e.downcast().unwrap())
    }

    fn deserialize_metadata(&self, mut deserializer: &mut D)
        -> Result<<dyn SerializeValue as Pointee>::Metadata, D::Error>
    {
        self.deserialize_dynoid_metadata(&mut deserializer).map_err(|e| *e.downcast().unwrap())
    }
}

impl ArchiveUnsized for dyn SerializeValue {
    type Archived = dyn DeserializeValue;
    type MetadataResolver = <ObjectIdentifier as Archive>::Resolver;

    unsafe fn resolve_metadata(&self, pos: usize, resolver: Self::MetadataResolver, out: *mut ArchivedMetadata<Self>) {
        let (oid_pos, oid) = out_field!(out.type_oid);
        let type_oid = self.archived_type_oid();
        type_oid.resolve(pos + oid_pos, resolver, oid);
    }
}

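// In the archive the trait object metadata is not a raw vtable pointer but the type OID;
// `ArchivedValueMetadata::pointer_metadata` below looks the vtable back up in
// IMPL_REGISTRY when the archived value is accessed.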
#[derive(Debug)]
pub struct ArchivedValueMetadata {
    pub type_oid: Archived<ObjectIdentifier>,
}

impl ArchivedValueMetadata {
    pub unsafe fn emplace(type_oid: Archived<ObjectIdentifier>, out: *mut Self) {
        let p = ptr::addr_of_mut!((*out).type_oid);
        ptr::write(p, type_oid);
    }

    pub fn vtable(&self) -> usize {
        IMPL_REGISTRY
            .get(ImplId::from_type_oid(&self.type_oid))
            .expect(&format!("Unregistered type oid {:?}", self.type_oid))
            .vtable
    }

    pub fn pointer_metadata(&self) -> DynMetadata<dyn DeserializeValue> {
        unsafe { core::mem::transmute(self.vtable()) }
    }
}

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct ImplId<'a> {
    type_oid: &'a [u8],
}

impl<'a> ImplId<'a> {
    pub fn from_type_oid(type_oid: &'a [u8]) -> Self {
        Self { type_oid }
    }
}

impl ImplId<'static> {
    fn new<T: TypeOid>() -> Self {
        Self {
            type_oid: &T::type_oid()
        }
    }
}

#[derive(Copy, Clone, Debug)]
struct ImplData<'a> {
    pub vtable: usize,
    pub name: &'a str,
    pub desc: &'a str,
    pub info: ImplDebugInfo,
}

#[derive(Copy, Clone, Debug)]
#[doc(hidden)]
pub struct ImplDebugInfo {
    pub file: &'static str,
    pub line: u32,
    pub column: u32,
}
macro_rules! debug_info {
    () => {
        ImplDebugInfo {
            file: core::file!(),
            line: core::line!(),
            column: core::column!(),
        }
    }
}

impl ImplData<'_> {
    pub unsafe fn pointer_metadata<T: ?Sized>(&self) -> DynMetadata<T> {
        core::mem::transmute(self.vtable)
    }
}

pub struct ImplEntry<'a> {
    id: ImplId<'a>,
    data: ImplData<'a>,
}
inventory::collect!(ImplEntry<'static>);

impl ImplEntry<'_> {
    #[doc(hidden)]
    pub fn new<T: TypeOid + RegisteredImpl>() -> Self {
        Self {
            id: ImplId::new::<T>(),
            data: ImplData {
                vtable: <T as RegisteredImpl>::vtable(),
                name: <T as TypeOid>::type_name(),
                desc: <T as TypeOid>::type_desc(),
                info: <T as RegisteredImpl>::debug_info(),
            },
        }
    }
}

#[derive(Debug)]
struct ImplRegistry {
    oid_to_data: HashMap<ImplId<'static>, ImplData<'static>>,
}

impl ImplRegistry {
    fn new() -> Self {
        Self { oid_to_data: HashMap::new() }
    }

    fn add_entry(&mut self, entry: &'static ImplEntry) {
        let old_val = self.oid_to_data.insert(entry.id, entry.data);

        if let Some(old) = old_val {
            eprintln!("Value impl oid conflict for {:?}", entry.id.type_oid);
            eprintln!("Existing impl registered at {}:{}:{}",
                old.info.file, old.info.line, old.info.column);
            eprintln!("New impl registered at {}:{}:{}",
                entry.data.info.file, entry.data.info.line, entry.data.info.column);
        }
        assert!(old_val.is_none());
    }

    fn get(&self, type_oid: ImplId) -> Option<ImplData> {
        self.oid_to_data.get(&type_oid).map(|d| *d)
    }
}

lazy_static::lazy_static! {
    // FIXME: Dynamic modules *will* break this.
    static ref IMPL_REGISTRY: ImplRegistry = {
        let mut reg = ImplRegistry::new();
        for entry in inventory::iter::<ImplEntry> {
            reg.add_entry(entry);
        }
        reg
    };
}

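// Safety: `vtable()` must return the vtable part of the `dyn DeserializeValue` metadata
// for `Self` (the `oidvalue!` macro below produces it via `ptr_meta::metadata` on a null
// pointer cast). `ImplData::pointer_metadata` and `ArchivedValueMetadata::pointer_metadata`
// transmute this usize straight back into a `DynMetadata`, so returning anything else is
// undefined behaviour.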
pub unsafe trait RegisteredImpl {
    fn vtable() -> usize;
    fn debug_info() -> ImplDebugInfo;
}

macro_rules! oiddeser {
    ( $y:ty, $z:ty ) => {
        impl DeserializeDynOid for $y
            where $y: for<'a> Deserialize<$z, (dyn DynDeserializer + 'a)>
        {
            unsafe fn deserialize_dynoid(&self, deserializer: &mut dyn DynDeserializer, alloc: &mut dyn FnMut(Layout) -> *mut u8) -> Result<*mut (), DynError> {
                let ptr = alloc(Layout::new::<$z>()).cast::<$z>();
                ptr.write(self.deserialize(deserializer)?);
                Ok(ptr as *mut ())
            }

            fn deserialize_dynoid_metadata(&self, _: &mut dyn DynDeserializer)
                -> Result<<dyn SerializeValue as Pointee>::Metadata, DynError>
            {
                unsafe {
                    Ok(core::mem::transmute(ptr_meta::metadata(
                        core::ptr::null::<$z>() as *const dyn SerializeValue
                    )))
                }
            }
        }
    }
}
macro_rules! oidvalue {
    ( $x:ident, $y:ty ) => {
        oidvalue! {$x, $y, $y}
    };
    ( $x:ident, $y:ty, $z:ty ) => {
        oiddeser! {$z, $y}

        impl TypeOid for $z {
            fn type_oid() -> &'static ObjectIdentifier {
                &$x
            }

            fn type_name() -> &'static str {
                stringify!($y)
            }

            fn type_desc() -> &'static str {
                "builtin"
            }
        }
        unsafe impl RegisteredImpl for $z {
            fn vtable() -> usize {
                unsafe {
                    core::mem::transmute(ptr_meta::metadata(
                        core::ptr::null::<$z>() as *const dyn DeserializeValue
                    ))
                }
            }
            fn debug_info() -> ImplDebugInfo {
                debug_info!()
            }
        }

        inventory::submit! {ImplEntry::new::<$z>()}
    }
}

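// For example, `oidvalue!(OID_U8, u8)` expands to `oidvalue!(OID_U8, u8, u8)`: it
// implements `TypeOid` and `RegisteredImpl` for the archived type (primitives archive
// to themselves) and submits an `ImplEntry` to the inventory, so IMPL_REGISTRY can map
// the OID back to a vtable at runtime.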
lazy_static::lazy_static! {
    pub static ref OID_BOOL: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.1").unwrap()
    };
    pub static ref OID_U8: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.2").unwrap()
    };
    pub static ref OID_U16: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.3").unwrap()
    };
    pub static ref OID_U32: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.4").unwrap()
    };
    pub static ref OID_U64: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.5").unwrap()
    };
    pub static ref OID_U128: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.6").unwrap()
    };
    pub static ref OID_I8: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.7").unwrap()
    };
    pub static ref OID_I16: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.8").unwrap()
    };
    pub static ref OID_I32: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.9").unwrap()
    };
    pub static ref OID_I64: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.10").unwrap()
    };
    pub static ref OID_I128: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.11").unwrap()
    };
    pub static ref OID_VEC3U8: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.1.13").unwrap()
    };

    pub static ref OID_POWERED: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.2.1").unwrap()
    };
    pub static ref OID_INTENSITY: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.2.2").unwrap()
    };
    pub static ref OID_COLOUR: ObjectIdentifier = {
        ObjectIdentifier::from_str("1.3.6.1.4.1.48398.612.2.3").unwrap()
    };
}
oidvalue!(OID_BOOL, bool);
oidvalue!(OID_U8, u8);
oidvalue!(OID_U16, u16);
oidvalue!(OID_U32, u32);
oidvalue!(OID_U64, u64);
oidvalue!(OID_U128, u128);
oidvalue!(OID_I8, i8);
oidvalue!(OID_I16, i16);
oidvalue!(OID_I32, i32);
oidvalue!(OID_I64, i64);
oidvalue!(OID_I128, i128);

#[derive(serde::Serialize, serde::Deserialize)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
#[archive_attr(derive(TypeName, Debug, PartialEq, serde::Serialize, serde::Deserialize))]
pub struct Vec3u8 {
    pub a: u8,
    pub b: u8,
    pub c: u8,
}
oidvalue!(OID_VEC3U8, Vec3u8, ArchivedVec3u8);

#[cfg(test)]
mod tests {
    use rand::Rng;
    use rand::distributions::Standard;
    use rand::prelude::Distribution;
    use crate::state::value::Vec3u8;

    impl Distribution<Vec3u8> for Standard {
        fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Vec3u8 {
            let a = self.sample(rng);
            let b = self.sample(rng);
            let c = self.sample(rng);
            Vec3u8 { a, b, c }
        }
    }
}
163
bffhd/varint.rs
Normal file
@ -0,0 +1,163 @@
use std::default::Default;
use std::ops::Deref;

pub struct VarUInt<const N: usize> {
    offset: usize,
    bytes: [u8; N],
}

impl<const N: usize> VarUInt<N> {
    #[inline(always)]
    const fn new(bytes: [u8; N], offset: usize) -> Self {
        Self { bytes, offset }
    }

    #[inline(always)]
    pub fn as_bytes(&self) -> &[u8] {
        &self.bytes[self.offset..]
    }

    #[inline(always)]
    fn as_mut_bytes(&mut self) -> &mut [u8] {
        &mut self.bytes[..]
    }

    #[inline(always)]
    pub const fn into_bytes(self) -> [u8; N] {
        self.bytes
    }
}

impl<const N: usize> Default for VarUInt<N> {
    fn default() -> Self {
        Self::new([0u8; N], N)
    }
}

impl<const N: usize> Deref for VarUInt<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        self.as_bytes()
    }
}

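// The encoders below fill the buffer from the back: the least significant 7-bit group
// goes into the last byte (continuation bit clear), higher groups are prepended with
// the continuation bit (0x80) set, and `offset` marks the first used byte. 200u8 thus
// encodes to [0x81, 0x48], i.e. 0b1000_0001 followed by 0b0100_1000.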
macro_rules! convert_from {
    ( $x:ty ) => {
        fn from(inp: $x) -> Self {
            let mut num = inp;
            let mut this = Self::default();
            let bytes = this.as_mut_bytes();

            let mut more = 0u8;
            let mut idx: usize = bytes.len() - 1;

            while num > 0x7f {
                bytes[idx] = (num & 0x7f) as u8 | more;
                num >>= 7;
                more = 0x80;
                idx -= 1;
            }
            bytes[idx] = (num as u8) | more;

            this.offset = idx;
            this
        }
    }
}

macro_rules! convert_into {
    ( $x:ty ) => {
        fn into(self) -> $x {
            let mut out = 0;

            // [0,1,2,3,4,5,6,7,8,9]
            //  ^ 0
            //            ^ offset = 5
            //                      ^ len = 10
            //            ^---------^ # of valid bytes = (len - offset)
            // for i in offset..len ⇒ all valid idx
            let bytes = self.as_bytes();
            let len = bytes.len();
            let mut shift = 0;

            for neg in 1..=len {
                let idx = len - neg;
                let val = (bytes[idx] & 0x7f) as $x;
                let shifted = val << shift;
                out |= shifted;
                shift += 7;
            }

            out
        }
    }
}

macro_rules! impl_convert_from_to {
    ( $num:ty, $req:literal, $nt:ident ) => {
        pub type $nt = VarUInt<$req>;
        impl From<$num> for VarUInt<$req> {
            convert_from! { $num }
        }
        impl Into<$num> for VarUInt<$req> {
            convert_into! { $num }
        }
    }
}

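// Buffer sizes are ceil(bits / 7), since every byte carries 7 payload bits:
// u8 → 2, u16 → 3, u32 → 5, u64 → 10, u128 → 19.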
impl_convert_from_to!(u8, 2, VarU8);
impl_convert_from_to!(u16, 3, VarU16);
impl_convert_from_to!(u32, 5, VarU32);
impl_convert_from_to!(u64, 10, VarU64);
impl_convert_from_to!(u128, 19, VarU128);

#[cfg(target_pointer_width = "64")]
type VarUsize = VarU64;
#[cfg(target_pointer_width = "32")]
type VarUsize = VarU32;
#[cfg(target_pointer_width = "16")]
type VarUsize = VarU16;

impl<T, const N: usize> From<&T> for VarUInt<N>
    where T: Copy,
          VarUInt<N>: From<T>
{
    fn from(t: &T) -> Self {
        (*t).into()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_varuint() {
        let inp = u64::MAX;
        let vi: VarU64 = inp.into();
        println!("Encoded {} into {:?}", inp, vi.as_bytes());
        let outp: u64 = vi.into();
        assert_eq!(inp, outp);

        let inp = 0x80;
        let vi: VarUInt<10> = inp.into();
        println!("Encoded {} into {:?}", inp, vi.as_bytes());
        let outp: u64 = vi.into();
        assert_eq!(inp, outp);
    }

    #[test]
    fn minimal() {
        let a = 5u8;
        assert_eq!(VarU8::from(a).as_bytes(), &[a]);
        let a = 200u8;
        assert_eq!(VarU8::from(a).as_bytes(), &[129, 72]);

        let inp = 128;
        let vi: VarU32 = inp.into();
        let expected: &[u8] = &[129, 0];
        assert_eq!(vi.as_bytes(), expected)
    }
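
    // Worked example: 300 = 2 * 128 + 44, so it should encode to the two bytes
    // [0x82, 0x2C] (0b1000_0010 with the continuation bit set, then 0b0010_1100)
    // and decode back unchanged.
    #[test]
    fn roundtrip_u16() {
        let inp = 300u16;
        let vi: VarU16 = inp.into();
        assert_eq!(vi.as_bytes(), &[0x82, 0x2C]);
        let outp: u16 = vi.into();
        assert_eq!(inp, outp);
    }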
}