Mirror of https://github.com/aljazceru/pubky-core.git (synced 2026-01-14 19:54:28 +01:00)

feat(common): remove unnecessary Result<> in infallible functions
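The pattern applied throughout the diff below: functions whose error path cannot actually occur stop returning Result and instead assert the invariant once with expect, so callers no longer have to unwrap errors that can never happen. A minimal sketch of the idea (illustrative helper names, not items from this crate):

    // Before: callers must handle a postcard error that cannot occur for
    // this fixed, in-memory value.
    fn id_bytes_fallible(id: u64) -> Result<Vec<u8>, postcard::Error> {
        postcard::to_allocvec(&id)
    }

    // After: the invariant is asserted once with a justification, and the
    // signature tells callers the operation cannot fail.
    fn id_bytes(id: u64) -> Vec<u8> {
        postcard::to_allocvec(&id).expect("serializing a u64 into a Vec cannot fail")
    }

The same change is applied below to crypto::encrypt, create_recovery_file, and the Argon2 key derivation, each with a comment explaining why the expect cannot trigger.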
@@ -19,6 +19,7 @@ const CURRENT_VERSION: u8 = 0;
const TIMESTAMP_WINDOW: i64 = 45 * 1_000_000;

#[derive(Debug, PartialEq, Serialize, Deserialize)]
/// Implementation of the [Pubky Auth spec](https://pubky.github.io/pubky-core/spec/auth.html).
pub struct AuthToken {
/// Signature over the token.
signature: Signature,
@@ -41,6 +42,7 @@ pub struct AuthToken {
}

impl AuthToken {
/// Sign a new AuthToken with given capabilities.
pub fn sign(keypair: &Keypair, capabilities: impl Into<Capabilities>) -> Self {
let timestamp = Timestamp::now();

@@ -60,10 +62,21 @@ impl AuthToken {
token
}

// === Getters ===

/// Returns the pubky that is providing this AuthToken
pub fn pubky(&self) -> &PublicKey {
&self.pubky
}

/// Returns the capabilities in this AuthToken.
pub fn capabilities(&self) -> &[Capability] {
&self.capabilities.0
}

// === Public Methods ===

/// Parse and verify an AuthToken.
pub fn verify(bytes: &[u8]) -> Result<Self, Error> {
if bytes[75] > CURRENT_VERSION {
return Err(Error::UnknownVersion);
@@ -95,19 +108,17 @@ impl AuthToken {
}
}

/// Serialize this AuthToken to its canonical binary representation.
pub fn serialize(&self) -> Vec<u8> {
postcard::to_allocvec(self).unwrap()
}

/// Deserialize an AuthToken from its canonical binary representation.
pub fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
Ok(postcard::from_bytes(bytes)?)
}

pub fn pubky(&self) -> &PublicKey {
&self.pubky
}

/// A unique ID for this [AuthToken], which is a concatenation of
/// Returns the unique ID for this [AuthToken], which is a concatenation of
/// [AuthToken::pubky] and [AuthToken::timestamp].
///
/// Assuming that [AuthToken::timestamp] is unique for every [AuthToken::pubky].
@@ -133,6 +144,8 @@ pub struct AuthVerifier {
}

impl AuthVerifier {
/// Verify an [AuthToken] by parsing it from its canonical binary representation,
/// verifying its signature, and confirming it wasn't already used.
pub fn verify(&self, bytes: &[u8]) -> Result<AuthToken, Error> {
self.gc();

@@ -168,18 +181,25 @@ impl AuthVerifier {
}

#[derive(thiserror::Error, Debug, PartialEq, Eq)]
/// Error verifying an [AuthToken]
pub enum Error {
#[error("Unknown version")]
/// Unknown version
UnknownVersion,
#[error("AuthToken has a timestamp that is more than 45 seconds in the future")]
/// AuthToken has a timestamp that is more than 45 seconds in the future
TooFarInTheFuture,
#[error("AuthToken has a timestamp that is more than 45 seconds in the past")]
/// AuthToken has a timestamp that is more than 45 seconds in the past
Expired,
#[error("Invalid Signature")]
/// Invalid Signature
InvalidSignature,
#[error(transparent)]
Postcard(#[from] postcard::Error),
/// Error parsing [AuthToken] using Postcard
Parsing(#[from] postcard::Error),
#[error("AuthToken already used")]
/// AuthToken already used
AlreadyUsed,
}
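A usage sketch of the auth API above; the pubky_common crate name and module paths are assumed from context rather than shown in this diff:

    use pubky_common::auth::AuthToken;
    use pubky_common::capabilities::{Action, Capabilities, Capability};
    use pubky_common::crypto::Keypair;

    fn roundtrip() {
        let keypair = Keypair::random();
        let cap = Capability {
            scope: "/pub/pubky.app/".to_string(),
            actions: vec![Action::Read, Action::Write],
        };

        // Sign a token, ship its canonical bytes, then parse and verify them.
        let token = AuthToken::sign(&keypair, Capabilities(vec![cap]));
        let bytes = token.serialize();
        let verified = AuthToken::verify(&bytes).expect("freshly signed token should verify");

        assert_eq!(verified.pubky(), &keypair.public_key());
    }

AuthVerifier::verify layers replay protection on top of the same parsing and signature check, which is where the AlreadyUsed variant comes in.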
@@ -1,10 +1,16 @@
//! Capabilities defining what scopes of resources can be accessed with what actions.

use std::fmt::Display;

use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq)]
/// A Capability defines the scope of resources and the actions that the holder
/// of this capability can access.
pub struct Capability {
/// Scope of resources (for example directories).
pub scope: String,
/// Actions allowed on the [Capability::scope].
pub actions: Vec<Action>,
}

@@ -19,6 +25,7 @@ impl Capability {
}

#[derive(Debug, Clone, PartialEq, Eq)]
/// Actions allowed on a given resource or scope of resources.
pub enum Action {
/// Can read the scope at the specified path (GET requests).
Read,
@@ -125,14 +132,19 @@ impl<'de> Deserialize<'de> for Capability {
}

#[derive(thiserror::Error, Debug, PartialEq, Eq)]
/// Error parsing a [Capability].
pub enum Error {
#[error("Capability: Invalid scope: does not start with `/`")]
/// Capability: Invalid scope: does not start with `/`
InvalidScope,
#[error("Capability: Invalid format should be <scope>:<abilities>")]
/// Capability: Invalid format should be <scope>:<abilities>
InvalidFormat,
#[error("Capability: Invalid Action")]
/// Capability: Invalid Action
InvalidAction,
#[error("Capabilities: Invalid capabilities format")]
/// Capabilities: Invalid capabilities format
InvalidCapabilities,
}

@@ -142,6 +154,7 @@ pub enum Error {
pub struct Capabilities(pub Vec<Capability>);

impl Capabilities {
/// Returns true if the list of capabilities contains a given capability.
pub fn contains(&self, capability: &Capability) -> bool {
self.0.contains(capability)
}
@@ -227,7 +240,7 @@ mod tests {
actions: vec![Action::Read, Action::Write],
};

// Read and write withing directory `/pub/pubky.app/`.
// Read and write within directory `/pub/pubky.app/`.
let expected_string = "/pub/pubky.app/:rw";

assert_eq!(cap.to_string(), expected_string);
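For reference, the "<scope>:<abilities>" string form exercised by the test above, sketched as standalone usage (crate path assumed):

    use pubky_common::capabilities::{Action, Capabilities, Capability};

    fn example() {
        // Read + write within the `/pub/pubky.app/` directory.
        let cap = Capability {
            scope: "/pub/pubky.app/".to_string(),
            actions: vec![Action::Read, Action::Write],
        };
        assert_eq!(cap.to_string(), "/pub/pubky.app/:rw");

        // `Capabilities` is a thin wrapper over Vec<Capability> used for membership checks.
        let caps = Capabilities(vec![cap.clone()]);
        assert!(caps.contains(&cap));
    }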
@@ -1,9 +1,18 @@
//! Constants used across Pubky.

/// [Reserved param keys](https://www.rfc-editor.org/rfc/rfc9460#name-initial-contents) for HTTPS Resource Records
pub mod reserved_param_keys {
/// HTTPS (RFC 9460) record's private param key, used to inform browsers
/// about the HTTP port to use when the domain is localhost.
pub const HTTP_PORT: u16 = 65280;
}

/// Local test network's hardcoded port numbers for local development.
pub mod testnet_ports {
/// The local test network's hardcoded DHT bootstrapping node's port number.
pub const BOOTSTRAP: u16 = 6881;
/// The local test network's hardcoded Pkarr Relay port number.
pub const PKARR_RELAY: u16 = 15411;
/// The local test network's hardcoded HTTP Relay port number.
pub const HTTP_RELAY: u16 = 15412;
}
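A small, hypothetical illustration of how these constants are meant to be consumed when wiring up a local testnet (the address strings are made up for the example):

    use pubky_common::constants::{reserved_param_keys, testnet_ports};

    fn example() {
        // Local-testnet endpoints assembled from the hardcoded ports.
        let bootstrap = format!("127.0.0.1:{}", testnet_ports::BOOTSTRAP);
        let pkarr_relay = format!("http://127.0.0.1:{}", testnet_ports::PKARR_RELAY);

        assert_eq!(bootstrap, "127.0.0.1:6881");
        assert_eq!(pkarr_relay, "http://127.0.0.1:15411");
        assert_eq!(reserved_param_keys::HTTP_PORT, 65280);
    }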
@@ -1,3 +1,5 @@
//! Cryptographic functions (hashing, encryption, and signatures).

use crypto_secretbox::{
aead::{Aead, AeadCore, KeyInit, OsRng},
XSalsa20Poly1305,
@@ -8,57 +10,69 @@ pub use pkarr::{Keypair, PublicKey};

pub use ed25519_dalek::Signature;

/// Blake3 Hash.
pub type Hash = blake3::Hash;

pub use blake3::hash;

pub use blake3::Hasher;

/// Create a random hash.
pub fn random_hash() -> Hash {
Hash::from_bytes(random())
}

/// Create an array of random bytes with a size `N`.
pub fn random_bytes<const N: usize>() -> [u8; N] {
let arr: [u8; N] = random();

arr
}

pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, EncryptError> {
/// Encrypt a message using `XSalsa20Poly1305`.
pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Vec<u8> {
if plain_text.is_empty() {
return plain_text.to_vec();
}

let cipher = XSalsa20Poly1305::new(encryption_key.into());
let nonce = XSalsa20Poly1305::generate_nonce(&mut OsRng); // unique per message
let ciphertext = cipher.encrypt(&nonce, plain_text)?;
let ciphertext = cipher
.encrypt(&nonce, plain_text)
.expect("XSalsa20Poly1305 encrypt should be infallible");

let mut out: Vec<u8> = Vec::with_capacity(nonce.len() + ciphertext.len());
out.extend_from_slice(nonce.as_slice());
out.extend_from_slice(&ciphertext);

Ok(out)
out
}

/// Decrypt an encrypted message using `XSalsa20Poly1305`.
pub fn decrypt(bytes: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, DecryptError> {
if bytes.is_empty() {
return Ok(bytes.to_vec());
}

let cipher = XSalsa20Poly1305::new(encryption_key.into());

if bytes.len() < 24 {
return Err(DecryptError::PayloadTooSmall(bytes.len()));
return Err(DecryptError::TooSmall(bytes.len()));
}

Ok(cipher.decrypt(bytes[..24].into(), &bytes[24..])?)
}

#[derive(thiserror::Error, Debug)]
pub enum EncryptError {
#[error(transparent)]
SecretBox(#[from] crypto_secretbox::Error),
}

#[derive(thiserror::Error, Debug)]
/// Error while decrypting a message
pub enum DecryptError {
#[error(transparent)]
SecretBox(#[from] crypto_secretbox::Error),
/// Failed to decrypt message.
Fail(#[from] crypto_secretbox::Error),

#[error("Encrypted message too small, expected at least 24 bytes nonce, receieved {0} bytes")]
PayloadTooSmall(usize),
#[error("Encrypted message too small, expected at least 24 bytes nonce, received {0} bytes")]
/// Encrypted message too small, expected at least 24 bytes nonce, received {0} bytes
TooSmall(usize),
}

#[cfg(test)]
@@ -70,7 +84,7 @@ mod tests {
let plain_text = "Plain text!";
let encryption_key = [0; 32];

let encrypted = encrypt(plain_text.as_bytes(), &encryption_key).unwrap();
let encrypted = encrypt(plain_text.as_bytes(), &encryption_key);
let decrypted = decrypt(&encrypted, &encryption_key).unwrap();

assert_eq!(decrypted, plain_text.as_bytes())
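A usage sketch of the new signatures (crate path assumed): encryption is now infallible, while decryption still returns a Result for tampered or truncated input:

    use pubky_common::crypto::{decrypt, encrypt, DecryptError};

    fn example() {
        let key = [0u8; 32];

        // `encrypt` returns the 24-byte nonce followed by the ciphertext,
        // rather than wrapping that in a Result.
        let encrypted = encrypt(b"hello", &key);
        assert!(encrypted.len() >= 24);
        assert_eq!(decrypt(&encrypted, &key).unwrap(), b"hello".to_vec());

        // Decryption still fails loudly when the payload cannot even hold a nonce.
        assert!(matches!(
            decrypt(&[0u8; 10], &key),
            Err(DecryptError::TooSmall(10))
        ));
    }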
@@ -1,3 +1,10 @@
#![doc = include_str!("../README.md")]
//!

#![deny(missing_docs)]
#![deny(rustdoc::broken_intra_doc_links)]
#![cfg_attr(any(), deny(clippy::unwrap_used))]

pub mod auth;
pub mod capabilities;
pub mod constants;
@@ -7,5 +14,6 @@ pub mod recovery_file;
pub mod session;

pub mod timestamp {
//! Timestamp used across Pubky crates.
pub use pubky_timestamp::*;
}
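With the module now re-exporting pubky_timestamp, usage stays the same as with the in-crate implementation removed further down. A sketch, assuming the re-exported crate keeps the same Timestamp API:

    use pubky_common::timestamp::Timestamp;

    fn example() {
        // Successive timestamps from the same process are strictly increasing,
        // and their Crockford base32 strings sort the same way as the raw u64s.
        let a = Timestamp::now();
        let b = Timestamp::now();

        assert!(b > a);
        assert!(b.to_string() > a.to_string());
    }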
@@ -1 +1,4 @@
//! Namespaces prepended to signed messages to avoid collisions.

/// Pubky Auth namespace as defined in the [spec](https://pubky.github.io/pubky-core/spec/auth.html)
pub const PUBKY_AUTH: &[u8; 10] = b"PUBKY:AUTH";
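A hypothetical helper illustrating what the namespace is for; the actual signing code lives in the auth module and is not shown here:

    use pubky_common::namespaces::PUBKY_AUTH;

    // The namespace is laid out in front of the payload before signing, so a
    // signature over an auth token can never be mistaken for a signature over
    // another message type.
    fn signable(payload: &[u8]) -> Vec<u8> {
        let mut out = Vec::with_capacity(PUBKY_AUTH.len() + payload.len());
        out.extend_from_slice(PUBKY_AUTH);
        out.extend_from_slice(payload);
        out
    }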
@@ -1,3 +1,5 @@
//! Tools for encrypting and decrypting a recovery file storing a user's root key secret.

use argon2::Argon2;
use pkarr::Keypair;

@@ -6,8 +8,9 @@ use crate::crypto::{decrypt, encrypt};
static SPEC_NAME: &str = "recovery";
static SPEC_LINE: &str = "pubky.org/recovery";

/// Decrypt a recovery file.
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair, Error> {
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase)?;
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase);

let newline_index = recovery_file
.iter()
@@ -38,11 +41,12 @@ pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<K
Ok(Keypair::from_secret_key(&secret_key))
}

pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u8>, Error> {
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase)?;
/// Encrypt a recovery file.
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Vec<u8> {
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase);
let secret_key = keypair.secret_key();

let encrypted_secret_key = encrypt(&secret_key, &encryption_key)?;
let encrypted_secret_key = encrypt(&secret_key, &encryption_key);

let mut out = Vec::with_capacity(SPEC_LINE.len() + 1 + encrypted_secret_key.len());

@@ -50,42 +54,44 @@ pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u
out.extend_from_slice(b"\n");
out.extend_from_slice(&encrypted_secret_key);

Ok(out)
out
}

fn recovery_file_encryption_key_from_passphrase(passphrase: &str) -> Result<[u8; 32], Error> {
fn recovery_file_encryption_key_from_passphrase(passphrase: &str) -> [u8; 32] {
let argon2id = Argon2::default();

let mut out = [0; 32];

argon2id.hash_password_into(passphrase.as_bytes(), SPEC_NAME.as_bytes(), &mut out)?;
argon2id
.hash_password_into(passphrase.as_bytes(), SPEC_NAME.as_bytes(), &mut out)
.expect("Output is the correct length, so this should be infallible");

Ok(out)
out
}

#[derive(thiserror::Error, Debug)]
/// Error decrypting a recovery file
pub enum Error {
// === Recovery file ==
#[error("Recovery file should start with a spec line, followed by a new line character")]
/// Recovery file should start with a spec line, followed by a new line character
RecoveryFileMissingSpecLine,

#[error("Recovery file should start with a spec line, followed by a new line character")]
/// Recovery file should start with a spec line, followed by a new line character
RecoveryFileVersionNotSupported,

#[error("Recovery file should contain an encrypted secret key after the new line character")]
/// Recovery file should contain an encrypted secret key after the new line character
RecoverFileMissingEncryptedSecretKey,

#[error("Recovery file encrypted secret key should be 32 bytes, got {0}")]
/// Recovery file encrypted secret key should be 32 bytes, got {0}
RecoverFileInvalidSecretKeyLength(usize),

#[error(transparent)]
Argon(#[from] argon2::Error),

#[error(transparent)]
/// Error while decrypting a message
DecryptError(#[from] crate::crypto::DecryptError),

#[error(transparent)]
EncryptError(#[from] crate::crypto::EncryptError),
}

#[cfg(test)]
@@ -97,7 +103,7 @@ mod tests {
let passphrase = "very secure password";
let keypair = Keypair::random();

let recovery_file = create_recovery_file(&keypair, passphrase).unwrap();
let recovery_file = create_recovery_file(&keypair, passphrase);
let recovered = decrypt_recovery_file(&recovery_file, passphrase).unwrap();

assert_eq!(recovered.public_key(), keypair.public_key());
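A usage sketch (crate path assumed) showing the now-infallible creation path and the failure mode that remains, a wrong passphrase:

    use pubky_common::crypto::Keypair;
    use pubky_common::recovery_file::{create_recovery_file, decrypt_recovery_file};

    fn example() {
        let keypair = Keypair::random();

        // Creating the file is infallible; only decryption can fail.
        let file = create_recovery_file(&keypair, "correct horse battery staple");

        // A wrong passphrase derives a different secretbox key, so the
        // authenticated decryption fails instead of returning garbage.
        assert!(decrypt_recovery_file(&file, "wrong passphrase").is_err());

        let recovered = decrypt_recovery_file(&file, "correct horse battery staple").unwrap();
        assert_eq!(recovered.public_key(), keypair.public_key());
    }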
@@ -1,3 +1,5 @@
//! Pubky homeserver session struct.

use pkarr::PublicKey;
use postcard::{from_bytes, to_allocvec};
use serde::{Deserialize, Serialize};
@@ -11,6 +13,7 @@ use crate::{capabilities::Capability, timestamp::Timestamp};
// TODO: use https://crates.io/crates/user-agent-parser to parse the session
// and get more information from the user-agent.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
/// Pubky homeserver session struct.
pub struct Session {
version: usize,
pubky: PublicKey,
@@ -22,6 +25,7 @@ pub struct Session {
}

impl Session {
/// Create a new session.
pub fn new(pubky: &PublicKey, capabilities: &[Capability], user_agent: Option<String>) -> Self {
Self {
version: 0,
@@ -35,16 +39,19 @@ impl Session {

// === Getters ===

/// Returns the pubky this session authorizes for.
pub fn pubky(&self) -> &PublicKey {
&self.pubky
}

/// Returns the capabilities this session provides on its pubky's resources.
pub fn capabilities(&self) -> &Vec<Capability> {
&self.capabilities
}

// === Setters ===

/// Set this session's user agent.
pub fn set_user_agent(&mut self, user_agent: String) -> &mut Self {
self.user_agent = user_agent;

@@ -55,6 +62,7 @@ impl Session {
self
}

/// Set this session's capabilities.
pub fn set_capabilities(&mut self, capabilities: Vec<Capability>) -> &mut Self {
self.capabilities = capabilities;

@@ -63,11 +71,13 @@ impl Session {

// === Public Methods ===

/// Serialize this session to its canonical binary representation.
pub fn serialize(&self) -> Vec<u8> {
to_allocvec(self).expect("Session::serialize")
}

pub fn deserialize(bytes: &[u8]) -> Result<Self> {
/// Deserialize this session from its canonical binary representation.
pub fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
if bytes.is_empty() {
return Err(Error::EmptyPayload);
}
@@ -82,16 +92,18 @@ impl Session {
// TODO: add `can_read()`, `can_write()` and `is_root()` methods
}

pub type Result<T> = core::result::Result<T, Error>;

#[derive(thiserror::Error, Debug, PartialEq)]
/// Error deserializing a [Session].
pub enum Error {
#[error("Empty payload")]
/// Empty payload
EmptyPayload,
#[error("Unknown version")]
/// Unknown version
UnknownVersion,
#[error(transparent)]
Postcard(#[from] postcard::Error),
/// Error parsing the binary representation.
Parsing(#[from] postcard::Error),
}

#[cfg(test)]
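A round-trip sketch of the session API above (crate path assumed):

    use pubky_common::crypto::Keypair;
    use pubky_common::session::Session;

    fn example() {
        let keypair = Keypair::random();

        // Build a session, adjust it through the setters, and round-trip it
        // through its canonical postcard encoding.
        let mut session = Session::new(&keypair.public_key(), &[], None);
        session.set_user_agent("example-agent/0.1".to_string());

        let bytes = session.serialize();
        let decoded = Session::deserialize(&bytes).expect("round-trip should succeed");
        assert_eq!(decoded, session);
    }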
@@ -1,280 +0,0 @@
//! Absolutely monotonic unix timestamp in microseconds

use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::{
ops::{Add, Sub},
sync::Mutex,
};

use once_cell::sync::Lazy;
use rand::Rng;

#[cfg(not(target_arch = "wasm32"))]
use std::time::SystemTime;

/// ~4% chance of none of 10 clocks have matching id.
const CLOCK_MASK: u64 = (1 << 8) - 1;
const TIME_MASK: u64 = !0 >> 8;

pub struct TimestampFactory {
clock_id: u64,
last_time: u64,
}

impl TimestampFactory {
pub fn new() -> Self {
Self {
clock_id: rand::thread_rng().gen::<u64>() & CLOCK_MASK,
last_time: system_time() & TIME_MASK,
}
}

pub fn now(&mut self) -> Timestamp {
// Ensure absolute monotonicity.
self.last_time = (system_time() & TIME_MASK).max(self.last_time + CLOCK_MASK + 1);

// Add clock_id to the end of the timestamp
Timestamp(self.last_time | self.clock_id)
}
}
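For readers skimming the removed file, a standalone restatement of the arithmetic in TimestampFactory::now above; the masks are repeated so the snippet is self-contained:

    // The time part advances by at least CLOCK_MASK + 1 per call (so it never
    // repeats or goes backwards), and a per-factory clock_id is OR-ed into
    // the low byte.
    const CLOCK_MASK: u64 = (1 << 8) - 1;
    const TIME_MASK: u64 = !0 >> 8;

    fn next(now_micros: u64, last_time: u64, clock_id: u64) -> u64 {
        let time = (now_micros & TIME_MASK).max(last_time + CLOCK_MASK + 1);
        time | (clock_id & CLOCK_MASK)
    }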
impl Default for TimestampFactory {
fn default() -> Self {
Self::new()
}
}

static DEFAULT_FACTORY: Lazy<Mutex<TimestampFactory>> =
Lazy::new(|| Mutex::new(TimestampFactory::default()));

/// Absolutely monotonic timestamp since [SystemTime::UNIX_EPOCH] in microseconds as u64.
///
/// The purpose of this timestamp is to be unique per "user", not globally,
/// it achieves this by:
/// 1. Override the last byte with a random `clock_id`, reducing the probability
/// of two matching timestamps across multiple machines/threads.
/// 2. Guarantee that the remaining 3 bytes are ever increasing (absolutely monotonic) within
/// the same thread regardless of the wall clock value
///
/// This timestamp is also serialized as BE bytes to remain sortable.
/// If a `utf-8` encoding is necessary, it is encoded as [base32::Alphabet::Crockford]
/// to act as a sortable Id.
///
/// U64 of microseconds is valid for the next 500 thousand years!
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Eq, Ord)]
pub struct Timestamp(u64);

impl Timestamp {
pub fn now() -> Self {
DEFAULT_FACTORY.lock().unwrap().now()
}

/// Return big endian bytes
pub fn to_bytes(&self) -> [u8; 8] {
self.0.to_be_bytes()
}

pub fn difference(&self, rhs: &Timestamp) -> i64 {
(self.0 as i64) - (rhs.0 as i64)
}

pub fn into_inner(&self) -> u64 {
self.0
}
}

impl Default for Timestamp {
fn default() -> Self {
Timestamp::now()
}
}

impl Display for Timestamp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let bytes: [u8; 8] = self.into();
f.write_str(&base32::encode(base32::Alphabet::Crockford, &bytes))
}
}

impl TryFrom<String> for Timestamp {
type Error = TimestampError;

fn try_from(value: String) -> Result<Self, Self::Error> {
match base32::decode(base32::Alphabet::Crockford, &value) {
Some(vec) => {
let bytes: [u8; 8] = vec
.try_into()
.map_err(|_| TimestampError::InvalidEncoding)?;

Ok(bytes.into())
}
None => Err(TimestampError::InvalidEncoding),
}
}
}

impl TryFrom<&[u8]> for Timestamp {
type Error = TimestampError;

fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
let bytes: [u8; 8] = bytes
.try_into()
.map_err(|_| TimestampError::InvalidBytesLength(bytes.len()))?;

Ok(bytes.into())
}
}

impl From<&Timestamp> for [u8; 8] {
fn from(timestamp: &Timestamp) -> Self {
timestamp.0.to_be_bytes()
}
}

impl From<[u8; 8]> for Timestamp {
fn from(bytes: [u8; 8]) -> Self {
Self(u64::from_be_bytes(bytes))
}
}

// === U64 conversion ===

impl From<Timestamp> for u64 {
fn from(value: Timestamp) -> Self {
value.into_inner()
}
}

impl Add<u64> for &Timestamp {
type Output = Timestamp;

fn add(self, rhs: u64) -> Self::Output {
Timestamp(self.0 + rhs)
}
}

impl Sub<u64> for &Timestamp {
type Output = Timestamp;

fn sub(self, rhs: u64) -> Self::Output {
Timestamp(self.0 - rhs)
}
}

impl Serialize for Timestamp {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let bytes = self.to_bytes();
bytes.serialize(serializer)
}
}

impl<'de> Deserialize<'de> for Timestamp {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let bytes: [u8; 8] = Deserialize::deserialize(deserializer)?;
Ok(Timestamp(u64::from_be_bytes(bytes)))
}
}

#[cfg(not(target_arch = "wasm32"))]
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
fn system_time() -> u64 {
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.expect("time drift")
.as_micros() as u64
}

#[cfg(target_arch = "wasm32")]
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
pub fn system_time() -> u64 {
// Won't be an issue for more than 5000 years!
(js_sys::Date::now() as u64)
// Turn milliseconds to microseconds
* 1000
}

#[derive(thiserror::Error, Debug)]
pub enum TimestampError {
#[error("Invalid bytes length, Timestamp should be encoded as 8 bytes, got {0}")]
InvalidBytesLength(usize),
#[error("Invalid timestamp encoding")]
InvalidEncoding,
}

#[cfg(test)]
mod tests {
use std::collections::HashSet;

use super::*;

#[test]
fn absolutely_monotonic() {
const COUNT: usize = 100;

let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);

for _ in 0..COUNT {
let timestamp = Timestamp::now();

set.insert(timestamp.clone());
vec.push(timestamp);
}

let mut ordered = vec.clone();
ordered.sort();

assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}

#[test]
fn strings() {
const COUNT: usize = 100;

let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);

for _ in 0..COUNT {
let string = Timestamp::now().to_string();

set.insert(string.clone());
vec.push(string)
}

let mut ordered = vec.clone();
ordered.sort();

assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}

#[test]
fn to_from_string() {
let timestamp = Timestamp::now();
let string = timestamp.to_string();
let decoded: Timestamp = string.try_into().unwrap();

assert_eq!(decoded, timestamp)
}

#[test]
fn serde() {
let timestamp = Timestamp::now();

let serialized = postcard::to_allocvec(&timestamp).unwrap();

assert_eq!(serialized, timestamp.to_bytes());

let deserialized: Timestamp = postcard::from_bytes(&serialized).unwrap();

assert_eq!(deserialized, timestamp);
}
}