Merge pull request #20 from pubky/feat/basic-auth

Feat/basic auth
This commit is contained in:
Nuh
2024-07-22 15:46:18 +03:00
committed by GitHub
31 changed files with 4173 additions and 1 deletion

2289
Cargo.lock generated Normal file

File diff suppressed because it is too large

View File

@@ -1,5 +1,5 @@
[workspace]
members = []
members = [ "pubky","pubky-*"]
# See: https://github.com/rust-lang/rust/issues/90148#issuecomment-949194352
resolver = "2"

17
pubky-common/Cargo.toml Normal file
View File

@@ -0,0 +1,17 @@
[package]
name = "pubky-common"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
base32 = "0.5.0"
blake3 = "1.5.1"
ed25519-dalek = "2.1.1"
once_cell = "1.19.0"
pkarr = "2.1.0"
rand = "0.8.5"
thiserror = "1.0.60"
postcard = { version = "1.0.8", features = ["alloc"] }
serde = { version = "1.0.204", features = ["derive"] }

220
pubky-common/src/auth.rs Normal file
View File

@@ -0,0 +1,220 @@
//! Client-server authentication using signed time steps
use std::sync::{Arc, Mutex};
use ed25519_dalek::ed25519::SignatureBytes;
use crate::{
crypto::{random_hash, Keypair, PublicKey, Signature},
timestamp::Timestamp,
};
// 30 seconds, in microseconds (the unit used by Timestamp)
const TIME_INTERVAL: u64 = 30 * 1_000_000;
#[derive(Debug, PartialEq)]
pub struct AuthnSignature(Box<[u8]>);
impl AuthnSignature {
pub fn new(signer: &Keypair, audience: &PublicKey, token: Option<&[u8]>) -> Self {
let mut bytes = Vec::with_capacity(96);
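// Wire format (96 bytes): 64-byte Ed25519 signature followed by the 32-byte token hash.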
let time: u64 = Timestamp::now().into();
let time_step = time / TIME_INTERVAL;
let token_hash = token.map_or(random_hash(), crate::crypto::hash);
let signature = signer
.sign(&signable(
&time_step.to_be_bytes(),
&signer.public_key(),
audience,
token_hash.as_bytes(),
))
.to_bytes();
bytes.extend_from_slice(&signature);
bytes.extend_from_slice(token_hash.as_bytes());
Self(bytes.into())
}
/// Sign a randomly generated nonce
pub fn generate(keypair: &Keypair, audience: &PublicKey) -> Self {
AuthnSignature::new(keypair, audience, None)
}
pub fn as_bytes(&self) -> &[u8] {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct AuthnVerifier {
audience: PublicKey,
inner: Arc<Mutex<Vec<[u8; 40]>>>,
// TODO: Support permissions
// token_hashes: HashSet<[u8; 32]>,
}
impl AuthnVerifier {
pub fn new(audience: PublicKey) -> Self {
Self {
audience,
inner: Arc::new(Mutex::new(Vec::new())),
}
}
pub fn verify(&self, bytes: &[u8], signer: &PublicKey) -> Result<(), AuthnSignatureError> {
self.gc();
if bytes.len() != 96 {
return Err(AuthnSignatureError::InvalidLength(bytes.len()));
}
let signature_bytes: SignatureBytes = bytes[0..64]
.try_into()
.expect("validate token length on instantiating");
let signature = Signature::from(signature_bytes);
let token_hash: [u8; 32] = bytes[64..].try_into().expect("should not be reachable");
let now = Timestamp::now().into_inner();
let past = now - TIME_INTERVAL;
let future = now + TIME_INTERVAL;
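// Accept signatures from the current, previous, or next time step to tolerate modest clock skew.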
let result = verify_at(now, self, &signature, signer, &token_hash);
match result {
Ok(_) => return Ok(()),
Err(AuthnSignatureError::AlreadyUsed) => return Err(AuthnSignatureError::AlreadyUsed),
_ => {}
}
let result = verify_at(past, self, &signature, signer, &token_hash);
match result {
Ok(_) => return Ok(()),
Err(AuthnSignatureError::AlreadyUsed) => return Err(AuthnSignatureError::AlreadyUsed),
_ => {}
}
verify_at(future, self, &signature, signer, &token_hash)
}
// === Private Methods ===
/// Remove all tokens older than two time intervals in the past.
fn gc(&self) {
let threshold = ((Timestamp::now().into_inner() / TIME_INTERVAL) - 2).to_be_bytes();
let mut inner = self.inner.lock().unwrap();
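// Entries are kept sorted by their 8-byte big-endian time-step prefix, so a binary search
// locates the cutoff index for entries older than two intervals.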
match inner.binary_search_by(|element| element[0..8].cmp(&threshold)) {
Ok(index) | Err(index) => {
inner.drain(0..index);
}
}
}
}
fn verify_at(
time: u64,
verifier: &AuthnVerifier,
signature: &Signature,
signer: &PublicKey,
token_hash: &[u8; 32],
) -> Result<(), AuthnSignatureError> {
let time_step = time / TIME_INTERVAL;
let time_step_bytes = time_step.to_be_bytes();
let result = signer.verify(
&signable(&time_step_bytes, signer, &verifier.audience, token_hash),
signature,
);
if result.is_ok() {
let mut inner = verifier.inner.lock().unwrap();
let mut candidate = [0_u8; 40];
candidate[..8].copy_from_slice(&time_step_bytes);
candidate[8..].copy_from_slice(token_hash);
match inner.binary_search_by(|element| element.cmp(&candidate)) {
Ok(index) | Err(index) => {
inner.insert(index, candidate);
}
};
return Ok(());
}
Err(AuthnSignatureError::InvalidSignature)
}
fn signable(
time_step_bytes: &[u8; 8],
signer: &PublicKey,
audience: &PublicKey,
token_hash: &[u8; 32],
) -> [u8; 115] {
let mut arr = [0; 115];
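// Signable layout (115 bytes): 11-byte namespace || 8-byte time step || 32-byte signer || 32-byte audience || 32-byte token hash.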
arr[..11].copy_from_slice(crate::namespaces::PUBKY_AUTHN);
arr[11..19].copy_from_slice(time_step_bytes);
arr[19..51].copy_from_slice(signer.as_bytes());
arr[51..83].copy_from_slice(audience.as_bytes());
arr[83..].copy_from_slice(token_hash);
arr
}
#[derive(thiserror::Error, Debug)]
pub enum AuthnSignatureError {
#[error("AuthnSignature should be 96 bytes long, got {0} bytes instead")]
InvalidLength(usize),
#[error("Invalid signature")]
InvalidSignature,
#[error("Authn signature already used")]
AlreadyUsed,
}
#[cfg(test)]
mod tests {
use crate::crypto::Keypair;
use super::{AuthnSignature, AuthnVerifier};
#[test]
fn sign_verify() {
let keypair = Keypair::random();
let signer = keypair.public_key();
let audience = Keypair::random().public_key();
let verifier = AuthnVerifier::new(audience.clone());
let authn_signature = AuthnSignature::generate(&keypair, &audience);
verifier
.verify(authn_signature.as_bytes(), &signer)
.unwrap();
{
// Invalid signable
let mut invalid = authn_signature.as_bytes().to_vec();
invalid[64..].copy_from_slice(&[0; 32]);
assert!(!verifier.verify(&invalid, &signer).is_ok())
}
{
// Invalid signer
let mut invalid = authn_signature.as_bytes().to_vec();
invalid[0..32].copy_from_slice(&[0; 32]);
assert!(!verifier.verify(&invalid, &signer).is_ok())
}
}
}

View File

@@ -0,0 +1,25 @@
use rand::prelude::Rng;
pub use pkarr::{Keypair, PublicKey};
pub use ed25519_dalek::Signature;
pub type Hash = blake3::Hash;
pub use blake3::hash;
pub fn random_hash() -> Hash {
let mut rng = rand::thread_rng();
Hash::from_bytes(rng.gen())
}
pub fn random_bytes<const N: usize>() -> [u8; N] {
let mut rng = rand::thread_rng();
let mut arr = [0u8; N];
#[allow(clippy::needless_range_loop)]
for i in 0..N {
arr[i] = rng.gen();
}
arr
}

5
pubky-common/src/lib.rs Normal file
View File

@@ -0,0 +1,5 @@
pub mod auth;
pub mod crypto;
pub mod namespaces;
pub mod session;
pub mod timestamp;

View File

@@ -0,0 +1 @@
pub const PUBKY_AUTHN: &[u8; 11] = b"PUBKY:AUTHN";

View File

@@ -0,0 +1,84 @@
use postcard::{from_bytes, to_allocvec};
use serde::{Deserialize, Serialize};
extern crate alloc;
use alloc::vec::Vec;
use crate::timestamp::Timestamp;
// TODO: add IP address?
// TODO: use https://crates.io/crates/user-agent-parser to parse the session
// and get more information from the user-agent.
#[derive(Clone, Default, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Session {
pub version: usize,
pub created_at: u64,
/// User specified name, defaults to the user-agent.
pub name: String,
pub user_agent: String,
}
impl Session {
pub fn new() -> Self {
Self {
created_at: Timestamp::now().into_inner(),
..Default::default()
}
}
// === Setters ===
pub fn set_user_agent(&mut self, user_agent: String) -> &mut Self {
self.user_agent = user_agent;
if self.name.is_empty() {
self.name.clone_from(&self.user_agent)
}
self
}
// === Public Methods ===
pub fn serialize(&self) -> Vec<u8> {
to_allocvec(self).expect("Session::serialize")
}
pub fn deserialize(bytes: &[u8]) -> Result<Self> {
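// The first byte is the postcard-encoded `version`; only version 0 is currently understood.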
if bytes[0] > 0 {
return Err(Error::UnknownVersion);
}
Ok(from_bytes(bytes)?)
}
}
pub type Result<T> = core::result::Result<T, Error>;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Unknown version")]
UnknownVersion,
#[error(transparent)]
Postcard(#[from] postcard::Error),
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn serialize() {
let mut session = Session::default();
session.user_agent = "foo".to_string();
let serialized = session.serialize();
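// postcard encodes fields in declaration order: version (0), created_at (0), name (length 0), user_agent (length 3 followed by "foo").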
assert_eq!(serialized, [0, 0, 0, 3, 102, 111, 111,]);
let deserialized = Session::deserialize(&serialized).unwrap();
assert_eq!(deserialized, session)
}
}

View File

@@ -0,0 +1,229 @@
//! Monotonic unix timestamp in microseconds
use std::fmt::Display;
use std::time::SystemTime;
use std::{
ops::{Add, Sub},
sync::Mutex,
};
use once_cell::sync::Lazy;
use rand::Rng;
/// ~4% chance that none of 10 clocks have a matching id.
const CLOCK_MASK: u64 = (1 << 8) - 1;
const TIME_MASK: u64 = !0 >> 8;
pub struct TimestampFactory {
clock_id: u64,
last_time: u64,
}
impl TimestampFactory {
pub fn new() -> Self {
Self {
clock_id: rand::thread_rng().gen::<u64>() & CLOCK_MASK,
last_time: system_time() & TIME_MASK,
}
}
pub fn now(&mut self) -> Timestamp {
// Ensure monotonicity.
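// If the wall clock hasn't advanced, step the previous value forward by `CLOCK_MASK + 1`
// so the result stays strictly greater even after the clock id is ORed in.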
self.last_time = (system_time() & TIME_MASK).max(self.last_time + CLOCK_MASK + 1);
// Add clock_id to the end of the timestamp
Timestamp(self.last_time | self.clock_id)
}
}
impl Default for TimestampFactory {
fn default() -> Self {
Self::new()
}
}
static DEFAULT_FACTORY: Lazy<Mutex<TimestampFactory>> =
Lazy::new(|| Mutex::new(TimestampFactory::default()));
/// Monotonic timestamp since [SystemTime::UNIX_EPOCH] in microseconds as u64.
///
/// The purpose of this timestamp is to be unique per "user", not globally;
/// it achieves this by:
/// 1. Overriding the last byte with a random `clock_id`, reducing the probability
/// of two matching timestamps across multiple machines/threads.
/// 2. Guaranteeing that the remaining bytes are ever increasing (monotonic) within
/// the same thread regardless of the wall clock value.
///
/// This timestamp is also serialized as big-endian bytes to remain sortable.
/// If a `utf-8` encoding is necessary, it is encoded as [base32::Alphabet::Crockford]
/// to act as a sortable Id.
///
/// A u64 of microseconds is valid for the next 500 thousand years!
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Eq, Ord)]
pub struct Timestamp(u64);
impl Timestamp {
pub fn now() -> Self {
DEFAULT_FACTORY.lock().unwrap().now()
}
/// Return big endian bytes
pub fn to_bytes(&self) -> [u8; 8] {
self.0.to_be_bytes()
}
pub fn difference(&self, rhs: &Timestamp) -> u64 {
self.0.abs_diff(rhs.0)
}
pub fn into_inner(&self) -> u64 {
self.0
}
}
impl Display for Timestamp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let bytes: [u8; 8] = self.into();
f.write_str(&base32::encode(base32::Alphabet::Crockford, &bytes))
}
}
impl TryFrom<String> for Timestamp {
type Error = TimestampError;
fn try_from(value: String) -> Result<Self, Self::Error> {
match base32::decode(base32::Alphabet::Crockford, &value) {
Some(vec) => {
let bytes: [u8; 8] = vec
.try_into()
.map_err(|_| TimestampError::InvalidEncoding)?;
Ok(bytes.into())
}
None => Err(TimestampError::InvalidEncoding),
}
}
}
impl TryFrom<&[u8]> for Timestamp {
type Error = TimestampError;
fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
let bytes: [u8; 8] = bytes
.try_into()
.map_err(|_| TimestampError::InvalidBytesLength(bytes.len()))?;
Ok(bytes.into())
}
}
impl From<&Timestamp> for [u8; 8] {
fn from(timestamp: &Timestamp) -> Self {
timestamp.0.to_be_bytes()
}
}
impl From<[u8; 8]> for Timestamp {
fn from(bytes: [u8; 8]) -> Self {
Self(u64::from_be_bytes(bytes))
}
}
// === U64 conversion ===
impl From<Timestamp> for u64 {
fn from(value: Timestamp) -> Self {
value.into_inner()
}
}
impl Add<u64> for &Timestamp {
type Output = Timestamp;
fn add(self, rhs: u64) -> Self::Output {
Timestamp(self.0 + rhs)
}
}
impl Sub<u64> for &Timestamp {
type Output = Timestamp;
fn sub(self, rhs: u64) -> Self::Output {
Timestamp(self.0 - rhs)
}
}
#[cfg(not(target_arch = "wasm32"))]
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
fn system_time() -> u64 {
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.expect("time drift")
.as_micros() as u64
}
#[derive(thiserror::Error, Debug)]
pub enum TimestampError {
#[error("Invalid bytes length, Timestamp should be encoded as 8 bytes, got {0}")]
InvalidBytesLength(usize),
#[error("Invalid timestamp encoding")]
InvalidEncoding,
}
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use super::*;
#[test]
fn monotonic() {
const COUNT: usize = 100;
let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);
for _ in 0..COUNT {
let timestamp = Timestamp::now();
set.insert(timestamp.clone());
vec.push(timestamp);
}
let mut ordered = vec.clone();
ordered.sort();
assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}
#[test]
fn strings() {
const COUNT: usize = 100;
let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);
for _ in 0..COUNT {
let string = Timestamp::now().to_string();
set.insert(string.clone());
vec.push(string)
}
let mut ordered = vec.clone();
ordered.sort();
assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}
#[test]
fn to_from_string() {
let timestamp = Timestamp::now();
let string = timestamp.to_string();
let decoded: Timestamp = string.try_into().unwrap();
assert_eq!(decoded, timestamp)
}
}

View File

@@ -0,0 +1,22 @@
[package]
name = "pubky_homeserver"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.82"
axum = "0.7.5"
axum-extra = { version = "0.9.3", features = ["typed-header"] }
base32 = "0.5.1"
bytes = "1.6.1"
dirs-next = "2.0.0"
heed = "0.20.3"
pkarr = { version = "2.1.0", features = ["async"] }
postcard = { version = "1.0.8", features = ["alloc"] }
pubky-common = { version = "0.1.0", path = "../pubky-common" }
serde = { version = "1.0.204", features = ["derive"] }
tokio = { version = "1.37.0", features = ["full"] }
tower-cookies = "0.10.0"
tower-http = { version = "0.5.2", features = ["cors", "trace"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }

View File

@@ -0,0 +1,105 @@
//! Configuration for the server
use anyhow::{anyhow, Result};
use pkarr::Keypair;
// use serde::{Deserialize, Serialize};
use std::{fmt::Debug, path::PathBuf};
use pubky_common::timestamp::Timestamp;
const DEFAULT_HOMESERVER_PORT: u16 = 6287;
const DEFAULT_STORAGE_DIR: &str = "pubky";
/// Server configuration
///
/// The config is usually loaded from a file with [`Self::load`].
#[derive(
// Serialize, Deserialize,
Clone,
)]
pub struct Config {
port: Option<u16>,
bootstrap: Option<Vec<String>>,
domain: String,
/// Path to the storage directory
///
/// Defaults to a directory in the OS data directory
storage: Option<PathBuf>,
keypair: Keypair,
}
impl Config {
// /// Load the config from a file.
// pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
// let s = tokio::fs::read_to_string(path.as_ref())
// .await
// .with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;
// let config: Config = toml::from_str(&s)?;
// Ok(config)
// }
/// Test configurations
pub fn test(testnet: &pkarr::mainline::Testnet) -> Self {
Self {
bootstrap: Some(testnet.bootstrap.to_owned()),
storage: Some(
std::env::temp_dir()
.join(Timestamp::now().to_string())
.join(DEFAULT_STORAGE_DIR),
),
..Default::default()
}
}
pub fn port(&self) -> u16 {
self.port.unwrap_or(DEFAULT_HOMESERVER_PORT)
}
pub fn bootstrap(&self) -> Option<Vec<String>> {
self.bootstrap.to_owned()
}
pub fn domain(&self) -> &str {
&self.domain
}
/// Get the path to the storage directory
pub fn storage(&self) -> Result<PathBuf> {
let dir = if let Some(storage) = &self.storage {
PathBuf::from(storage)
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};
Ok(dir.join("homeserver"))
}
pub fn keypair(&self) -> &Keypair {
&self.keypair
}
}
impl Default for Config {
fn default() -> Self {
Self {
port: Some(0),
bootstrap: None,
domain: "localhost".to_string(),
storage: None,
keypair: Keypair::random(),
}
}
}
impl Debug for Config {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_map()
.entry(&"port", &self.port())
.entry(&"storage", &self.storage())
.entry(&"public_key", &self.keypair().public_key())
.finish()
}
}

View File

@@ -0,0 +1,39 @@
use std::fs;
use std::path::Path;
use heed::{types::Str, Database, Env, EnvOpenOptions, RwTxn};
mod migrations;
pub mod tables;
use migrations::TABLES_COUNT;
#[derive(Debug, Clone)]
pub struct DB {
pub(crate) env: Env,
}
impl DB {
pub fn open(storage: &Path) -> anyhow::Result<Self> {
fs::create_dir_all(storage)?;
let env = unsafe { EnvOpenOptions::new().max_dbs(TABLES_COUNT).open(storage) }?;
let db = DB { env };
db.run_migrations()?;
Ok(db)
}
fn run_migrations(&self) -> anyhow::Result<()> {
let mut wtxn = self.env.write_txn()?;
migrations::create_users_table(&self.env, &mut wtxn)?;
migrations::create_sessions_table(&self.env, &mut wtxn)?;
wtxn.commit()?;
Ok(())
}
}

View File

@@ -0,0 +1,19 @@
use heed::{types::Str, Database, Env, RwTxn};
use super::tables;
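// Passed to heed's `max_dbs` when opening the environment; it must cover every named database created below.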
pub const TABLES_COUNT: u32 = 2;
pub fn create_users_table(env: &Env, wtxn: &mut RwTxn) -> anyhow::Result<()> {
let _: tables::users::UsersTable =
env.create_database(wtxn, Some(tables::users::USERS_TABLE))?;
Ok(())
}
pub fn create_sessions_table(env: &Env, wtxn: &mut RwTxn) -> anyhow::Result<()> {
let _: tables::sessions::SessionsTable =
env.create_database(wtxn, Some(tables::sessions::SESSIONS_TABLE))?;
Ok(())
}

View File

@@ -0,0 +1,2 @@
pub mod sessions;
pub mod users;

View File

@@ -0,0 +1,11 @@
use std::{borrow::Cow, time::SystemTime};
use heed::{
types::{Bytes, Str},
BoxedError, BytesDecode, BytesEncode, Database,
};
/// session secret => Session.
pub type SessionsTable = Database<Str, Bytes>;
pub const SESSIONS_TABLE: &str = "sessions";

View File

@@ -0,0 +1,60 @@
use std::{borrow::Cow, time::SystemTime};
use postcard::{from_bytes, to_allocvec};
use pubky_common::timestamp::Timestamp;
use serde::{Deserialize, Serialize};
use heed::{types::Str, BoxedError, BytesDecode, BytesEncode, Database};
use pkarr::PublicKey;
extern crate alloc;
use alloc::vec::Vec;
/// PublicKey => User.
pub type UsersTable = Database<PublicKeyCodec, User>;
pub const USERS_TABLE: &str = "users";
// TODO: add more administration metadata like quota, invitation links, etc.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct User {
pub created_at: u64,
}
impl<'a> BytesEncode<'a> for User {
type EItem = Self;
fn bytes_encode(user: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
let vec = to_allocvec(user).unwrap();
Ok(Cow::Owned(vec))
}
}
impl<'a> BytesDecode<'a> for User {
type DItem = Self;
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, BoxedError> {
let user: User = from_bytes(bytes).unwrap();
Ok(user)
}
}
pub struct PublicKeyCodec {}
impl<'a> BytesEncode<'a> for PublicKeyCodec {
type EItem = PublicKey;
fn bytes_encode(pubky: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
Ok(Cow::Borrowed(pubky.as_bytes()))
}
}
impl<'a> BytesDecode<'a> for PublicKeyCodec {
type DItem = PublicKey;
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, BoxedError> {
Ok(PublicKey::try_from(bytes)?)
}
}

View File

@@ -0,0 +1,99 @@
//! Server error
use axum::{
extract::rejection::{ExtensionRejection, PathRejection, QueryRejection},
http::StatusCode,
response::IntoResponse,
};
use pubky_common::auth::AuthnSignatureError;
use tracing::debug;
pub type Result<T, E = Error> = core::result::Result<T, E>;
#[derive(Debug, Clone)]
pub struct Error {
// #[serde(with = "serde_status_code")]
status: StatusCode,
detail: Option<String>,
}
impl Default for Error {
fn default() -> Self {
Self {
status: StatusCode::INTERNAL_SERVER_ERROR,
detail: None,
}
}
}
impl Error {
pub fn with_status(status: StatusCode) -> Error {
Self {
status,
detail: None,
}
}
/// Create a new [`Error`].
pub fn new(status_code: StatusCode, message: Option<impl ToString>) -> Error {
Self {
status: status_code,
detail: message.map(|m| m.to_string()),
}
}
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
match self.detail {
Some(detail) => (self.status, detail).into_response(),
_ => (self.status,).into_response(),
}
}
}
impl From<QueryRejection> for Error {
fn from(error: QueryRejection) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<ExtensionRejection> for Error {
fn from(error: ExtensionRejection) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<PathRejection> for Error {
fn from(error: PathRejection) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<std::io::Error> for Error {
fn from(error: std::io::Error) -> Self {
Self::new(StatusCode::INTERNAL_SERVER_ERROR, Some(error))
}
}
// === Pubky specific errors ===
impl From<AuthnSignatureError> for Error {
fn from(error: AuthnSignatureError) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<pkarr::Error> for Error {
fn from(error: pkarr::Error) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<heed::Error> for Error {
fn from(error: heed::Error) -> Self {
debug!(?error);
Self::with_status(StatusCode::INTERNAL_SERVER_ERROR)
}
}

View File

@@ -0,0 +1,47 @@
use std::collections::HashMap;
use axum::{
async_trait,
extract::{FromRequestParts, Path},
http::{request::Parts, StatusCode},
response::{IntoResponse, Response},
RequestPartsExt,
};
use pkarr::PublicKey;
use crate::error::{Error, Result};
#[derive(Debug)]
pub struct Pubky(PublicKey);
impl Pubky {
pub fn public_key(&self) -> &PublicKey {
&self.0
}
}
#[async_trait]
impl<S> FromRequestParts<S> for Pubky
where
S: Send + Sync,
{
type Rejection = Response;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
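// Pull the `:pubky` path parameter and parse it as a Pkarr public key.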
let params: Path<HashMap<String, String>> =
parts.extract().await.map_err(IntoResponse::into_response)?;
let pubky_id = params
.get("pubky")
.ok_or_else(|| (StatusCode::NOT_FOUND, "pubky param missing").into_response())?;
let public_key = PublicKey::try_from(pubky_id.to_string())
.map_err(Error::try_from)
.map_err(IntoResponse::into_response)?;
// TODO: return 404 if the user doesn't exist, but exclude signups.
Ok(Pubky(public_key))
}
}

View File

@@ -0,0 +1,11 @@
#![allow(unused)]
pub mod config;
mod database;
mod error;
mod extractors;
mod pkarr;
mod routes;
mod server;
pub use server::Homeserver;

View File

@@ -0,0 +1,15 @@
use anyhow::Result;
use pubky_homeserver::Homeserver;
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt()
.with_env_filter("pubky_homeserver=debug,tower_http=debug")
.init();
let server = Homeserver::start(Default::default()).await?;
server.run_until_done().await?;
Ok(())
}

View File

@@ -0,0 +1,44 @@
//! Pkarr related task
use pkarr::{
dns::{rdata::SVCB, Packet},
Keypair, PkarrClientAsync, SignedPacket,
};
pub async fn publish_server_packet(
pkarr_client: PkarrClientAsync,
keypair: &Keypair,
domain: &str,
port: u16,
) -> anyhow::Result<()> {
let mut packet = Packet::new_reply(0);
let mut svcb = SVCB::new(0, domain.try_into()?);
// Only publish the port for the localhost domain,
// assuming any other domain points to a reverse proxy
// at the conventional ports.
if domain == "localhost" {
svcb.priority = 1;
svcb.set_port(port);
// TODO: Add more parameters like the signer key!
// svcb.set_param(key, value)
};
// TODO: announce A/AAAA records as well for Noise connections?
// Or maybe Iroh's magic socket
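// The SVCB record is published at the apex ("@") of the homeserver's own public key, advertising its HTTP endpoint.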
packet.answers.push(pkarr::dns::ResourceRecord::new(
"@".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(keypair, &packet)?;
pkarr_client.publish(&signed_packet).await?;
Ok(())
}

View File

@@ -0,0 +1,25 @@
use axum::{
routing::{delete, get, post, put},
Router,
};
use tower_cookies::CookieManagerLayer;
use tower_http::trace::TraceLayer;
use crate::server::AppState;
mod auth;
mod drive;
mod root;
pub fn create_app(state: AppState) -> Router {
Router::new()
.route("/", get(root::handler))
.route("/:pubky", put(auth::signup))
.route("/:pubky/session", get(auth::session))
.route("/:pubky/session", post(auth::signin))
.route("/:pubky/session", delete(auth::signout))
.route("/:pubky/*key", get(drive::put))
.layer(TraceLayer::new_for_http())
.layer(CookieManagerLayer::new())
.with_state(state)
}

View File

@@ -0,0 +1,140 @@
use axum::{
extract::{Request, State},
http::{HeaderMap, StatusCode},
response::IntoResponse,
routing::get,
Router,
};
use axum_extra::{headers::UserAgent, TypedHeader};
use bytes::Bytes;
use heed::BytesEncode;
use postcard::to_allocvec;
use tower_cookies::{Cookie, Cookies};
use pubky_common::{
crypto::{random_bytes, random_hash},
session::Session,
timestamp::Timestamp,
};
use crate::{
database::tables::{
sessions::{SessionsTable, SESSIONS_TABLE},
users::{User, UsersTable, USERS_TABLE},
},
error::{Error, Result},
extractors::Pubky,
server::AppState,
};
pub async fn signup(
State(state): State<AppState>,
TypedHeader(user_agent): TypedHeader<UserAgent>,
cookies: Cookies,
pubky: Pubky,
body: Bytes,
) -> Result<impl IntoResponse> {
// TODO: Verify invitation link.
// TODO: add errors in case of an already existing user.
signin(State(state), TypedHeader(user_agent), cookies, pubky, body).await
}
pub async fn session(
State(state): State<AppState>,
TypedHeader(user_agent): TypedHeader<UserAgent>,
cookies: Cookies,
pubky: Pubky,
) -> Result<impl IntoResponse> {
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
let rtxn = state.db.env.read_txn()?;
let sessions: SessionsTable = state
.db
.env
.open_database(&rtxn, Some(SESSIONS_TABLE))?
.expect("Session table already created");
if let Some(session) = sessions.get(&rtxn, cookie.value())? {
let session = session.to_owned();
rtxn.commit()?;
return Ok(session);
};
rtxn.commit()?;
};
Err(Error::with_status(StatusCode::NOT_FOUND))
}
pub async fn signout(
State(state): State<AppState>,
cookies: Cookies,
pubky: Pubky,
) -> Result<impl IntoResponse> {
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
let mut wtxn = state.db.env.write_txn()?;
let sessions: SessionsTable = state
.db
.env
.open_database(&wtxn, Some(SESSIONS_TABLE))?
.expect("Session table already created");
let _ = sessions.delete(&mut wtxn, cookie.value());
wtxn.commit()?;
return Ok(());
};
Err(Error::with_status(StatusCode::UNAUTHORIZED))
}
pub async fn signin(
State(state): State<AppState>,
TypedHeader(user_agent): TypedHeader<UserAgent>,
cookies: Cookies,
pubky: Pubky,
body: Bytes,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key();
state.verifier.verify(&body, public_key)?;
let mut wtxn = state.db.env.write_txn()?;
let users: UsersTable = state.db.env.create_database(&mut wtxn, Some(USERS_TABLE))?;
if let Some(existing) = users.get(&wtxn, public_key)? {
users.put(&mut wtxn, public_key, &existing)?;
} else {
users.put(
&mut wtxn,
public_key,
&User {
created_at: Timestamp::now().into_inner(),
},
)?;
}
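// The session secret is 16 random bytes encoded as Crockford base32; it is stored
// server-side as the sessions table key and handed to the client as the cookie value.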
let session_secret = base32::encode(base32::Alphabet::Crockford, &random_bytes::<16>());
let sessions: SessionsTable = state
.db
.env
.open_database(&wtxn, Some(SESSIONS_TABLE))?
.expect("Sessions table already created");
// TODO: handle not having a user agent?
let mut session = Session::new();
session.set_user_agent(user_agent.to_string());
sessions.put(&mut wtxn, &session_secret, &session.serialize())?;
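// The cookie is named after the user's public key, so one client can hold sessions for multiple pubkys.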
cookies.add(Cookie::new(public_key.to_string(), session_secret));
wtxn.commit()?;
Ok(())
}

View File

@@ -0,0 +1,11 @@
use axum::response::IntoResponse;
use tracing::debug;
use crate::extractors::Pubky;
pub async fn put(pubky: Pubky) -> Result<impl IntoResponse, String> {
debug!(pubky=?pubky.public_key());
Ok("Pubky drive...".to_string())
}

View File

@@ -0,0 +1,5 @@
use axum::response::IntoResponse;
pub async fn handler() -> Result<impl IntoResponse, String> {
Ok("This a Pubky homeserver.".to_string())
}

View File

@@ -0,0 +1,154 @@
use std::{future::IntoFuture, net::SocketAddr};
use anyhow::{Error, Result};
use pubky_common::auth::AuthnVerifier;
use tokio::{net::TcpListener, signal, task::JoinSet};
use tracing::{info, warn};
use pkarr::{
mainline::dht::{DhtSettings, Testnet},
PkarrClient, PublicKey, Settings,
};
use crate::{config::Config, database::DB, pkarr::publish_server_packet};
#[derive(Debug)]
pub struct Homeserver {
port: u16,
config: Config,
tasks: JoinSet<std::io::Result<()>>,
}
#[derive(Clone, Debug)]
pub(crate) struct AppState {
pub verifier: AuthnVerifier,
pub db: DB,
}
impl Homeserver {
pub async fn start(config: Config) -> Result<Self> {
let public_key = config.keypair().public_key();
let db = DB::open(&config.storage()?)?;
let state = AppState {
verifier: AuthnVerifier::new(public_key.clone()),
db,
};
let app = crate::routes::create_app(state);
let mut tasks = JoinSet::new();
let app = app.clone();
let listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], config.port()))).await?;
let port = listener.local_addr()?.port();
// Spawn http server task
tasks.spawn(
axum::serve(
listener,
app.into_make_service_with_connect_info::<SocketAddr>(),
)
.with_graceful_shutdown(shutdown_signal())
.into_future(),
);
info!("Homeserver listening on http://localhost:{port}");
let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
bootstrap: config.bootstrap(),
..Default::default()
},
..Default::default()
})?
.as_async();
publish_server_packet(pkarr_client, config.keypair(), config.domain(), port).await?;
info!("Homeserver listening on pubky://{public_key}");
Ok(Self {
tasks,
config,
port,
})
}
/// Test version of [Homeserver::start], using mainline Testnet, and a temporary storage.
pub async fn start_test(testnet: &Testnet) -> Result<Self> {
Homeserver::start(Config::test(testnet)).await
}
// === Getters ===
pub fn port(&self) -> u16 {
self.port
}
pub fn public_key(&self) -> PublicKey {
self.config.keypair().public_key()
}
// === Public Methods ===
/// Shutdown the server and wait for all tasks to complete.
pub async fn shutdown(mut self) -> Result<()> {
self.tasks.abort_all();
self.run_until_done().await?;
Ok(())
}
/// Wait for all tasks to complete.
///
/// Runs forever unless tasks fail.
pub async fn run_until_done(mut self) -> Result<()> {
let mut final_res: Result<()> = Ok(());
while let Some(res) = self.tasks.join_next().await {
match res {
Ok(Ok(())) => {}
Err(err) if err.is_cancelled() => {}
Ok(Err(err)) => {
warn!(?err, "task failed");
final_res = Err(Error::from(err));
}
Err(err) => {
warn!(?err, "task panicked");
final_res = Err(err.into());
}
}
}
final_res
}
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
fn graceful_shutdown() {
info!("Gracefully Shutting down..");
}
tokio::select! {
_ = ctrl_c => graceful_shutdown(),
_ = terminate => graceful_shutdown(),
}
}

22
pubky/Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[package]
name = "pubky"
version = "0.1.0"
edition = "2021"
[dependencies]
pubky-common = { version = "0.1.0", path = "../pubky-common" }
pkarr = "2.1.0"
ureq = { version = "2.10.0", features = ["cookies"] }
thiserror = "1.0.62"
url = "2.5.2"
flume = { version = "0.11.0", features = ["select", "eventual-fairness"], default-features = false }
[dev-dependencies]
pubky_homeserver = { path = "../pubky-homeserver" }
tokio = "1.37.0"
[features]
async = ["flume/async"]
default = ["async"]

311
pubky/src/client.rs Normal file
View File

@@ -0,0 +1,311 @@
use std::{collections::HashMap, fmt::format, time::Duration};
use pkarr::{
dns::{rdata::SVCB, Packet},
mainline::{dht::DhtSettings, Testnet},
Keypair, PkarrClient, PublicKey, Settings, SignedPacket,
};
use ureq::{Agent, Response};
use url::Url;
use pubky_common::{auth::AuthnSignature, session::Session};
use crate::error::{Error, Result};
const MAX_RECURSIVE_PUBKY_HOMESERVER_RESOLUTION: u8 = 3;
#[derive(Debug, Clone)]
pub struct PubkyClient {
agent: Agent,
pkarr: PkarrClient,
}
impl PubkyClient {
pub fn new() -> Self {
Self {
agent: Agent::new(),
pkarr: PkarrClient::new(Default::default()).unwrap(),
}
}
pub fn test(testnet: &Testnet) -> Self {
Self {
agent: Agent::new(),
pkarr: PkarrClient::new(Settings {
dht: DhtSettings {
request_timeout: Some(Duration::from_millis(10)),
bootstrap: Some(testnet.bootstrap.to_owned()),
..DhtSettings::default()
},
..Settings::default()
})
.unwrap(),
}
}
// === Public Methods ===
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
pub fn signup(&self, keypair: &Keypair, homeserver: &str) -> Result<()> {
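// Resolve the homeserver's public key (the authn audience) and its clearnet URL before signing the request body.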
let (audience, mut url) = self.resolve_endpoint(homeserver)?;
url.set_path(&format!("/{}", keypair.public_key()));
self.request(HttpMethod::Put, &url)
.send_bytes(AuthnSignature::generate(keypair, &audience).as_bytes())
.map_err(Box::new)?;
self.publish_pubky_homeserver(keypair, homeserver)?;
Ok(())
}
/// Check the current session for a given Pubky in its homeserver.
pub fn session(&self, pubky: &PublicKey) -> Result<Session> {
let (homeserver, mut url) = self.resolve_pubky_homeserver(pubky)?;
url.set_path(&format!("/{}/session", pubky));
let mut bytes = vec![];
let result = self.request(HttpMethod::Get, &url).call().map_err(Box::new);
if let Ok(reader) = result {
reader.into_reader().read_to_end(&mut bytes);
} else {
return Err(Error::NotSignedIn);
}
Ok(Session::deserialize(&bytes)?)
}
/// Signout from a homeserver.
pub fn signout(&self, pubky: &PublicKey) -> Result<()> {
let (homeserver, mut url) = self.resolve_pubky_homeserver(pubky)?;
url.set_path(&format!("/{}/session", pubky));
self.request(HttpMethod::Delete, &url)
.call()
.map_err(Box::new)?;
Ok(())
}
/// Signin to a homeserver.
pub fn signin(&self, keypair: &Keypair) -> Result<()> {
let pubky = keypair.public_key();
let (audience, mut url) = self.resolve_pubky_homeserver(&pubky)?;
url.set_path(&format!("/{}/session", &pubky));
self.request(HttpMethod::Post, &url)
.send_bytes(AuthnSignature::generate(keypair, &audience).as_bytes())
.map_err(Box::new)?;
Ok(())
}
// === Private Methods ===
/// Publish the SVCB record for `_pubky.<public_key>`.
pub(crate) fn publish_pubky_homeserver(&self, keypair: &Keypair, host: &str) -> Result<()> {
let mut packet = Packet::new_reply(0);
if let Some(existing) = self.pkarr.resolve(&keypair.public_key())? {
for answer in existing.packet().answers.iter().cloned() {
if !answer.name.to_string().starts_with("_pubky") {
packet.answers.push(answer.into_owned())
}
}
}
let svcb = SVCB::new(0, host.try_into()?);
packet.answers.push(pkarr::dns::ResourceRecord::new(
"_pubky".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(keypair, &packet)?;
self.pkarr.publish(&signed_packet)?;
Ok(())
}
/// Resolve the homeserver for a pubky.
pub(crate) fn resolve_pubky_homeserver(&self, pubky: &PublicKey) -> Result<(PublicKey, Url)> {
let target = format!("_pubky.{}", pubky);
self.resolve_endpoint(&target)
.map_err(|_| Error::Generic("Could not resolve homeserver".to_string()))
}
/// Resolve a service's public_key and clearnet url from a Pubky domain
fn resolve_endpoint(&self, target: &str) -> Result<(PublicKey, Url)> {
// TODO: cache the result of this function?
// TODO: use MAX_RECURSIVE_PUBKY_HOMESERVER_RESOLUTION
// TODO: move to common?
let mut target = target.to_string();
let mut homeserver_public_key = None;
let mut host = target.clone();
// PublicKey is very good at extracting the Pkarr TLD from a string.
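// Follow SVCB targets for as long as they are themselves Pkarr public keys; stop at the first clearnet hostname.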
while let Ok(public_key) = PublicKey::try_from(target.clone()) {
if let Some(signed_packet) = self.pkarr.resolve(&public_key)? {
let mut prior = None;
for answer in signed_packet.resource_records(&target) {
if let pkarr::dns::rdata::RData::SVCB(svcb) = &answer.rdata {
if svcb.priority == 0 {
prior = Some(svcb)
} else if let Some(sofar) = prior {
if svcb.priority >= sofar.priority {
prior = Some(svcb)
}
// TODO return random if priority is the same
} else {
prior = Some(svcb)
}
}
}
if let Some(svcb) = prior {
homeserver_public_key = Some(public_key);
target = svcb.target.to_string();
if let Some(port) = svcb.get_param(pkarr::dns::rdata::SVCB::PORT) {
if port.len() < 2 {
// TODO: debug! Error encoding port!
}
let port = u16::from_be_bytes([port[0], port[1]]);
host = format!("{target}:{port}");
} else {
host.clone_from(&target);
};
continue;
}
};
break;
}
if let Some(homeserver) = homeserver_public_key {
let url = if host.starts_with("localhost") {
format!("http://{host}")
} else {
format!("https://{host}")
};
return Ok((homeserver, Url::parse(&url)?));
}
Err(Error::Generic("Could not resolve endpoint".to_string()))
}
fn request(&self, method: HttpMethod, url: &Url) -> ureq::Request {
self.agent.request_url(method.into(), url)
}
}
impl Default for PubkyClient {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug, Clone)]
pub enum HttpMethod {
Get,
Put,
Post,
Delete,
}
impl From<HttpMethod> for &str {
fn from(value: HttpMethod) -> Self {
match value {
HttpMethod::Get => "GET",
HttpMethod::Put => "PUT",
HttpMethod::Post => "POST",
HttpMethod::Delete => "DELETE",
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use pkarr::{
dns::{rdata::SVCB, Packet},
mainline::{dht::DhtSettings, Testnet},
Keypair, PkarrClient, Settings, SignedPacket,
};
use pubky_homeserver::Homeserver;
#[tokio::test]
async fn resolve_homeserver() {
let testnet = Testnet::new(3);
let server = Homeserver::start_test(&testnet).await.unwrap();
// Publish an intermediate controller of the homeserver
let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
bootstrap: Some(testnet.bootstrap.clone()),
..Default::default()
},
..Default::default()
})
.unwrap()
.as_async();
let intermediate = Keypair::random();
let mut packet = Packet::new_reply(0);
let server_tld = server.public_key().to_string();
let mut svcb = SVCB::new(0, server_tld.as_str().try_into().unwrap());
packet.answers.push(pkarr::dns::ResourceRecord::new(
"pubky".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(&intermediate, &packet).unwrap();
pkarr_client.publish(&signed_packet).await.unwrap();
tokio::task::spawn_blocking(move || {
let client = PubkyClient::test(&testnet);
let pubky = Keypair::random();
client
.publish_pubky_homeserver(&pubky, &format!("pubky.{}", &intermediate.public_key()));
let (public_key, url) = client
.resolve_pubky_homeserver(&pubky.public_key())
.unwrap();
assert_eq!(public_key, server.public_key());
assert_eq!(url.host_str(), Some("localhost"));
assert_eq!(url.port(), Some(server.port()));
})
.await
.expect("task failed")
}
}

65
pubky/src/client_async.rs Normal file
View File

@@ -0,0 +1,65 @@
use std::thread;
use pkarr::{Keypair, PublicKey};
use pubky_common::session::Session;
use crate::{error::Result, PubkyClient};
pub struct PubkyClientAsync(PubkyClient);
impl PubkyClient {
pub fn as_async(&self) -> PubkyClientAsync {
PubkyClientAsync(self.clone())
}
}
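// Each async method below spawns a thread to run the blocking client and awaits the result over a bounded flume channel.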
impl PubkyClientAsync {
/// Async version of [PubkyClient::signup]
pub async fn signup(&self, keypair: &Keypair, homeserver: &str) -> Result<()> {
let (sender, receiver) = flume::bounded::<Result<()>>(1);
let client = self.0.clone();
let keypair = keypair.clone();
let homeserver = homeserver.to_string();
thread::spawn(move || sender.send(client.signup(&keypair, &homeserver)));
receiver.recv_async().await?
}
/// Async version of [PubkyClient::session]
pub async fn session(&self, pubky: &PublicKey) -> Result<Session> {
let (sender, receiver) = flume::bounded::<Result<Session>>(1);
let client = self.0.clone();
let pubky = pubky.clone();
thread::spawn(move || sender.send(client.session(&pubky)));
receiver.recv_async().await?
}
/// Async version of [PubkyClient::signout]
pub async fn signout(&self, pubky: &PublicKey) -> Result<()> {
let (sender, receiver) = flume::bounded::<Result<()>>(1);
let client = self.0.clone();
let pubky = pubky.clone();
thread::spawn(move || sender.send(client.signout(&pubky)));
receiver.recv_async().await?
}
/// Async version of [PubkyClient::signin]
pub async fn signin(&self, keypair: &Keypair) -> Result<()> {
let (sender, receiver) = flume::bounded::<Result<()>>(1);
let client = self.0.clone();
let keypair = keypair.clone();
thread::spawn(move || sender.send(client.signin(&keypair)));
receiver.recv_async().await?
}
}

36
pubky/src/error.rs Normal file
View File

@@ -0,0 +1,36 @@
//! Main Crate Error
use pkarr::dns::SimpleDnsError;
// Alias Result to be the crate Result.
pub type Result<T, E = Error> = core::result::Result<T, E>;
#[derive(thiserror::Error, Debug)]
/// Pubky crate Error
pub enum Error {
/// For starter, to remove as code matures.
#[error("Generic error: {0}")]
Generic(String),
#[error("Not signed in")]
NotSignedIn,
// === Transparent ===
#[error(transparent)]
Dns(#[from] SimpleDnsError),
#[error(transparent)]
Pkarr(#[from] pkarr::Error),
#[error(transparent)]
Flume(#[from] flume::RecvError),
#[error(transparent)]
Ureq(#[from] Box<ureq::Error>),
#[error(transparent)]
Url(#[from] url::ParseError),
#[error(transparent)]
Session(#[from] pubky_common::session::Error),
}

59
pubky/src/lib.rs Normal file
View File

@@ -0,0 +1,59 @@
#![allow(unused)]
mod client;
mod client_async;
mod error;
pub use client::PubkyClient;
pub use error::Error;
#[cfg(test)]
mod tests {
use super::*;
use super::error::Error;
use pkarr::{mainline::Testnet, Keypair};
use pubky_common::session::Session;
use pubky_homeserver::Homeserver;
#[tokio::test]
async fn basic_authn() {
let testnet = Testnet::new(3);
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet).as_async();
let keypair = Keypair::random();
client
.signup(&keypair, &server.public_key().to_string())
.await
.unwrap();
let session = client.session(&keypair.public_key()).await.unwrap();
assert_eq!(session, Session { ..session.clone() });
client.signout(&keypair.public_key()).await.unwrap();
{
let session = client.session(&keypair.public_key()).await;
assert!(session.is_err());
match session {
Err(Error::NotSignedIn) => {}
_ => assert!(false, "expected NotSignedIn error"),
}
}
client.signin(&keypair).await.unwrap();
{
let session = client.session(&keypair.public_key()).await.unwrap();
assert_eq!(session, Session { ..session.clone() });
}
}
}