Merge pull request #37 from pubky/dev

Dev
Nuh
2025-02-08 13:09:54 +03:00
committed by GitHub
95 changed files with 4974 additions and 3028 deletions

1
.gitignore vendored

@@ -1,2 +1,3 @@
target/
config.toml
storage/

1396
Cargo.lock generated

File diff suppressed because it is too large.


@@ -3,16 +3,14 @@ members = [
"pubky",
"pubky-*",
"http-relay",
"examples"
]
# See: https://github.com/rust-lang/rust/issues/90148#issuecomment-949194352
resolver = "2"
[workspace.dependencies]
pkarr = { version = "2.0.0", git = "https://github.com/Pubky/pkarr", branch = "serde", package = "pkarr", features = ["async", "serde"] }
serde = { version = "^1.0.209", features = ["derive"] }
[profile.release]
lto = true
opt-level = 'z'


@@ -16,12 +16,13 @@ name = "request"
path = "./request/main.rs"
[dependencies]
anyhow = "1.0.86"
anyhow = "1.0.94"
base64 = "0.22.1"
clap = { version = "4.5.16", features = ["derive"] }
clap = { version = "4.5.23", features = ["derive"] }
pubky = { path = "../pubky" }
pubky-common = { version = "0.1.0", path = "../pubky-common" }
reqwest = "0.12.8"
reqwest = "0.12.9"
rpassword = "7.3.1"
tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] }
url = "2.5.2"
tokio = { version = "1.42.0", features = ["macros", "rt-multi-thread"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
url = "2.5.4"


@@ -1,6 +1,6 @@
use anyhow::Result;
use clap::Parser;
use pubky::PubkyClient;
use pubky::Client;
use std::path::PathBuf;
use pubky_common::crypto::PublicKey;
@@ -24,7 +24,7 @@ async fn main() -> Result<()> {
let homeserver = cli.homeserver;
let client = PubkyClient::builder().build();
let client = Client::builder().build()?;
println!("Enter your recovery_file's passphrase to signup:");
let passphrase = rpassword::read_password()?;


@@ -56,8 +56,8 @@ export class PubkyAuthWidget extends LitElement {
this.testnet = false;
this.open = false;
/** @type {import("@synonymdev/pubky").PubkyClient} */
this.pubkyClient = new window.pubky.PubkyClient();
/** @type {import("@synonymdev/pubky").Client} */
this.pubkyClient = new window.pubky.Client();
this.caps = this.caps || ""
}
@@ -74,9 +74,9 @@ export class PubkyAuthWidget extends LitElement {
console.debug("Switching testnet");
if (this.testnet) {
this.pubkyClient = window.pubky.PubkyClient.testnet()
this.pubkyClient = window.pubky.Client.testnet()
} else {
this.pubkyClient = new window.pubky.PubkyClient();
this.pubkyClient = new window.pubky.Client();
}
console.debug("Pkarr Relays: " + this.pubkyClient.getPkarrRelays())


@@ -1,6 +1,6 @@
use anyhow::Result;
use clap::Parser;
use pubky::PubkyClient;
use pubky::Client;
use std::path::PathBuf;
use url::Url;
@@ -66,7 +66,7 @@ async fn main() -> Result<()> {
println!("PublicKey: {}", keypair.public_key());
let client = if cli.testnet.unwrap_or_default() {
let client = PubkyClient::testnet();
let client = Client::testnet()?;
// For the purposes of this demo, we need to make sure
// the user has an account on the local homeserver.
@@ -78,7 +78,7 @@ async fn main() -> Result<()> {
client
} else {
PubkyClient::builder().build()
Client::builder().build()?
};
println!("Sending AuthToken to the 3rd party app...");


@@ -15,3 +15,7 @@ Or make a direct HTTP request.
```bash
cargo run --bin request GET https://<Pkarr domain>/[path]
```
### Testnet
You can pass the `--testnet` flag to run the query in testnet mode (using a local DHT testnet).


@@ -1,9 +1,11 @@
use std::env;
use anyhow::Result;
use clap::Parser;
use reqwest::Method;
use url::Url;
use pubky::PubkyClient;
use pubky::Client;
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
@@ -12,27 +14,42 @@ struct Cli {
method: Method,
/// Pubky or HTTPS url
url: Url,
/// Use testnet mode
#[clap(long)]
testnet: bool,
}
#[tokio::main]
async fn main() -> Result<()> {
let cli = Cli::parse();
let args = Cli::parse();
let client = PubkyClient::builder().build();
tracing_subscriber::fmt()
.with_env_filter(env::var("TRACING").unwrap_or("info".to_string()))
.init();
match cli.url.scheme() {
"https" => {
unimplemented!();
}
"pubky" => {
let response = client.get(cli.url).await.unwrap();
let client = if args.testnet {
Client::testnet()?
} else {
Client::builder().build()?
};
println!("Got a response: \n {:?}", response);
}
_ => {
panic!("Only https:// and pubky:// URL schemes are supported")
// Build and send the request
let response = client.get(args.url).send().await?;
println!("< Response:");
println!("< {:?} {}", response.version(), response.status());
for (name, value) in response.headers() {
if let Ok(v) = value.to_str() {
println!("< {name}: {v}");
}
}
let bytes = response.bytes().await?;
match String::from_utf8(bytes.to_vec()) {
Ok(string) => println!("<\n{}", string),
Err(_) => println!("<\n{:?}", bytes),
}
Ok(())
}

13
http-relay/Cargo.toml Normal file

@@ -0,0 +1,13 @@
[package]
name = "http-relay"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.94"
axum = "0.7.9"
axum-server = "0.7.1"
futures-util = "0.3.31"
tokio = { version = "1.42.0", features = ["full"] }
tracing = "0.1.41"
url = "2.5.4"

7
http-relay/README.md Normal file

@@ -0,0 +1,7 @@
# HTTP Relay
A Rust implementation of _some_ of the [HTTP relay spec](https://httprelay.io/).
Normally you are better off running the [reference implementation's binary](https://httprelay.io/download/).
For the time being, this implementation is meant as a convenient library for unit tests and testnets in Pubky.
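A rough sketch of that test usage, based on the `HttpRelay` API in `src/lib.rs` below. It assumes `tokio` (with the `full` feature), `reqwest`, and `anyhow` as dev-dependencies; the channel id `demo-channel` is made up for illustration.

```rust
use anyhow::Result;

#[tokio::main]
async fn main() -> Result<()> {
    // Port 0 binds an ephemeral port, so parallel tests don't collide.
    let relay = http_relay::HttpRelay::builder().http_port(0).build().await?;

    // http://localhost:<port>/link/demo-channel
    let url = {
        let mut url = relay.local_link_url();
        url.path_segments_mut().unwrap().push("demo-channel");
        url.to_string()
    };

    // A consumer GET blocks until a producer POSTs to the same channel id.
    let consumer = tokio::spawn({
        let url = url.clone();
        async move { reqwest::get(url).await?.bytes().await }
    });

    // Give the GET a moment to register its waiter; the relay only accepts a
    // POST for an id that already has a pending GET.
    tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    reqwest::Client::new().post(url).body("hello").send().await?;

    assert_eq!(consumer.await??.to_vec(), b"hello".to_vec());

    relay.shutdown();
    Ok(())
}
```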

167
http-relay/src/lib.rs Normal file

@@ -0,0 +1,167 @@
use std::{
collections::HashMap,
net::{SocketAddr, TcpListener},
sync::{Arc, Mutex},
};
use anyhow::Result;
use axum::{
body::{Body, Bytes},
extract::{Path, State},
response::IntoResponse,
routing::get,
Router,
};
use axum_server::Handle;
use tokio::sync::Notify;
use futures_util::{stream::StreamExt, TryFutureExt};
use url::Url;
// Shared state to store GET requests and their notifications
type SharedState = Arc<Mutex<HashMap<String, (Vec<u8>, Arc<Notify>)>>>;
#[derive(Debug, Default)]
pub struct Config {
pub http_port: u16,
}
#[derive(Debug, Default)]
pub struct HttpRelayBuilder(Config);
impl HttpRelayBuilder {
/// Configure the port used for HTTP server.
pub fn http_port(mut self, port: u16) -> Self {
self.0.http_port = port;
self
}
pub async fn build(self) -> Result<HttpRelay> {
HttpRelay::start(self.0).await
}
}
pub struct HttpRelay {
pub(crate) http_handle: Handle,
http_address: SocketAddr,
}
impl HttpRelay {
pub fn builder() -> HttpRelayBuilder {
HttpRelayBuilder::default()
}
pub async fn start(config: Config) -> Result<Self> {
let shared_state: SharedState = Arc::new(Mutex::new(HashMap::new()));
let app = Router::new()
.route("/link/:id", get(link::get).post(link::post))
.with_state(shared_state);
let http_handle = Handle::new();
let http_listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], config.http_port)))?;
let http_address = http_listener.local_addr()?;
tokio::spawn(
axum_server::from_tcp(http_listener)
.handle(http_handle.clone())
.serve(app.into_make_service())
.map_err(|error| tracing::error!(?error, "HttpRelay http server error")),
);
Ok(Self {
http_handle,
http_address,
})
}
pub fn http_address(&self) -> SocketAddr {
self.http_address
}
/// Returns the localhost Url of this server.
pub fn local_url(&self) -> Url {
Url::parse(&format!("http://localhost:{}", self.http_address.port()))
.expect("local_url should be formatted fine")
}
/// Returns the localhost URL of Link endpoints
pub fn local_link_url(&self) -> Url {
let mut url = self.local_url();
let mut segments = url
.path_segments_mut()
.expect("HttpRelay::local_link_url path_segments_mut");
segments.push("link");
drop(segments);
url
}
pub fn shutdown(&self) {
self.http_handle.shutdown();
}
}
mod link {
use super::*;
pub async fn get(
Path(id): Path<String>,
State(state): State<SharedState>,
) -> impl IntoResponse {
// Create a notification for this ID
let notify = Arc::new(Notify::new());
{
let mut map = state.lock().unwrap();
// Store the notification and return it when POST arrives
map.entry(id.clone())
.or_insert_with(|| (vec![], notify.clone()));
}
notify.notified().await;
// Respond with the data stored for this ID
let map = state.lock().unwrap();
if let Some((data, _)) = map.get(&id) {
Bytes::from(data.clone()).into_response()
} else {
(axum::http::StatusCode::NOT_FOUND, "Not Found").into_response()
}
}
pub async fn post(
Path(id): Path<String>,
State(state): State<SharedState>,
body: Body,
) -> impl IntoResponse {
// Aggregate the body into bytes
let mut stream = body.into_data_stream();
let mut bytes = vec![];
while let Some(next) = stream.next().await {
let chunk = next.map_err(|e| e.to_string()).unwrap();
bytes.extend_from_slice(&chunk);
}
// Notify any waiting GET request for this ID
let mut map = state.lock().unwrap();
if let Some((storage, notify)) = map.get_mut(&id) {
*storage = bytes;
notify.notify_one();
Ok(())
} else {
Err((
axum::http::StatusCode::NOT_FOUND,
"No waiting GET request for this ID",
))
}
}
}


@@ -9,22 +9,22 @@ repository = "https://github.com/pubky/pubky-core"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
base32 = "0.5.0"
blake3 = "1.5.1"
base32 = "0.5.1"
blake3 = "1.5.5"
ed25519-dalek = { version = "2.1.1", features = ["serde"] }
once_cell = "1.19.0"
once_cell = "1.20.2"
rand = "0.8.5"
thiserror = "1.0.60"
postcard = { version = "1.0.8", features = ["alloc"] }
thiserror = "2.0.6"
postcard = { version = "1.1.1", features = ["alloc"] }
crypto_secretbox = { version = "0.1.1", features = ["std"] }
argon2 = { version = "0.5.3", features = ["std"] }
pubky-timestamp = { version = "0.2.0", features = ["full"] }
serde = { version = "1.0.213", features = ["derive"] }
pkarr = { version = "2.2.1-alpha.2", features = ["serde"] }
serde = { version = "1.0.216", features = ["derive"] }
pkarr = { git = "https://github.com/Pubky/pkarr", package = "pkarr", default-features = false, features = ["keys"] }
[target.'cfg(target_arch = "wasm32")'.dependencies]
js-sys = "0.3.69"
js-sys = "0.3.76"
[dev-dependencies]
postcard = "1.0.8"
postcard = "1.1.1"


@@ -0,0 +1,9 @@
/// [Reserved param keys](https://www.rfc-editor.org/rfc/rfc9460#name-initial-contents) for HTTPS Resource Records
pub mod reserved_param_keys {
pub const HTTP_PORT: u16 = 65280;
}
pub mod testnet_ports {
pub const PKARR_RELAY: u16 = 15411;
pub const HTTP_RELAY: u16 = 15412;
}


@@ -30,7 +30,7 @@ pub fn random_bytes<const N: usize>() -> [u8; N] {
arr
}
pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, Error> {
pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, EncryptError> {
let cipher = XSalsa20Poly1305::new(encryption_key.into());
let nonce = XSalsa20Poly1305::generate_nonce(&mut OsRng); // unique per message
let ciphertext = cipher.encrypt(&nonce, plain_text)?;
@@ -42,18 +42,31 @@ pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>,
Ok(out)
}
pub fn decrypt(bytes: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, Error> {
pub fn decrypt(bytes: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, DecryptError> {
let cipher = XSalsa20Poly1305::new(encryption_key.into());
if bytes.len() < 24 {
return Err(DecryptError::PayloadTooSmall(bytes.len()));
}
Ok(cipher.decrypt(bytes[..24].into(), &bytes[24..])?)
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
pub enum EncryptError {
#[error(transparent)]
SecretBox(#[from] crypto_secretbox::Error),
}
#[derive(thiserror::Error, Debug)]
pub enum DecryptError {
#[error(transparent)]
SecretBox(#[from] crypto_secretbox::Error),
#[error("Encrypted message too small, expected at least 24 bytes nonce, receieved {0} bytes")]
PayloadTooSmall(usize),
}
#[cfg(test)]
mod tests {
use super::*;
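For context, a minimal round-trip sketch of the `encrypt`/`decrypt` API above (an illustration, not part of this diff; the key here is just random bytes for demonstration):

```rust
use pubky_common::crypto::{decrypt, encrypt, random_bytes};

fn main() {
    // Illustration only: a throwaway random key; real keys are typically
    // derived (e.g. with argon2 in the recovery file code).
    let key: [u8; 32] = random_bytes();

    // `encrypt` prepends the 24-byte XSalsa20 nonce to the ciphertext.
    let cipher = encrypt(b"hello", &key).expect("encrypt");
    assert!(cipher.len() >= 24);

    // `decrypt` rejects payloads shorter than the 24-byte nonce with
    // DecryptError::PayloadTooSmall.
    let plain = decrypt(&cipher, &key).expect("decrypt");
    assert_eq!(plain, b"hello".to_vec());
}
```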


@@ -1,5 +1,6 @@
pub mod auth;
pub mod capabilities;
pub mod constants;
pub mod crypto;
pub mod namespaces;
pub mod recovery_file;


@@ -82,7 +82,10 @@ pub enum Error {
Argon(#[from] argon2::Error),
#[error(transparent)]
Crypto(#[from] crate::crypto::Error),
DecryptError(#[from] crate::crypto::DecryptError),
#[error(transparent)]
EncryptError(#[from] crate::crypto::EncryptError),
}
#[cfg(test)]


@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
extern crate alloc;
use alloc::vec::Vec;
use crate::{auth::AuthToken, capabilities::Capability, timestamp::Timestamp};
use crate::{capabilities::Capability, timestamp::Timestamp};
// TODO: add IP address?
// TODO: use https://crates.io/crates/user-agent-parser to parse the session
@@ -22,12 +22,12 @@ pub struct Session {
}
impl Session {
pub fn new(token: &AuthToken, user_agent: Option<String>) -> Self {
pub fn new(pubky: &PublicKey, capabilities: &[Capability], user_agent: Option<String>) -> Self {
Self {
version: 0,
pubky: token.pubky().to_owned(),
pubky: pubky.clone(),
created_at: Timestamp::now().as_u64(),
capabilities: token.capabilities().to_vec(),
capabilities: capabilities.to_vec(),
user_agent: user_agent.as_deref().unwrap_or("").to_string(),
name: user_agent.as_deref().unwrap_or("").to_string(),
}


@@ -0,0 +1,280 @@
//! Absolutely monotonic unix timestamp in microseconds
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::{
ops::{Add, Sub},
sync::Mutex,
};
use once_cell::sync::Lazy;
use rand::Rng;
#[cfg(not(target_arch = "wasm32"))]
use std::time::SystemTime;
/// ~4% chance that none of 10 clocks have a matching id.
const CLOCK_MASK: u64 = (1 << 8) - 1;
const TIME_MASK: u64 = !0 >> 8;
pub struct TimestampFactory {
clock_id: u64,
last_time: u64,
}
impl TimestampFactory {
pub fn new() -> Self {
Self {
clock_id: rand::thread_rng().gen::<u64>() & CLOCK_MASK,
last_time: system_time() & TIME_MASK,
}
}
pub fn now(&mut self) -> Timestamp {
// Ensure absolute monotonicity.
self.last_time = (system_time() & TIME_MASK).max(self.last_time + CLOCK_MASK + 1);
// Add clock_id to the end of the timestamp
Timestamp(self.last_time | self.clock_id)
}
}
impl Default for TimestampFactory {
fn default() -> Self {
Self::new()
}
}
static DEFAULT_FACTORY: Lazy<Mutex<TimestampFactory>> =
Lazy::new(|| Mutex::new(TimestampFactory::default()));
/// Absolutely monotonic timestamp since [SystemTime::UNIX_EPOCH] in microseconds as u64.
///
/// The purpose of this timestamp is to be unique per "user", not globally;
/// it achieves this by:
/// 1. Overriding the last byte with a random `clock_id`, reducing the probability
/// of two matching timestamps across multiple machines/threads.
/// 2. Guaranteeing that the remaining 7 bytes are ever increasing (absolutely monotonic) within
/// the same thread regardless of the wall clock value
///
/// This timestamp is also serialized as BE bytes to remain sortable.
/// If a `utf-8` encoding is necessary, it is encoded as [base32::Alphabet::Crockford]
/// to act as a sortable Id.
///
/// U64 of microseconds is valid for the next 500 thousand years!
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Eq, Ord)]
pub struct Timestamp(u64);
impl Timestamp {
pub fn now() -> Self {
DEFAULT_FACTORY.lock().unwrap().now()
}
/// Return big endian bytes
pub fn to_bytes(&self) -> [u8; 8] {
self.0.to_be_bytes()
}
pub fn difference(&self, rhs: &Timestamp) -> i64 {
(self.0 as i64) - (rhs.0 as i64)
}
pub fn into_inner(&self) -> u64 {
self.0
}
}
impl Default for Timestamp {
fn default() -> Self {
Timestamp::now()
}
}
impl Display for Timestamp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let bytes: [u8; 8] = self.into();
f.write_str(&base32::encode(base32::Alphabet::Crockford, &bytes))
}
}
impl TryFrom<String> for Timestamp {
type Error = TimestampError;
fn try_from(value: String) -> Result<Self, Self::Error> {
match base32::decode(base32::Alphabet::Crockford, &value) {
Some(vec) => {
let bytes: [u8; 8] = vec
.try_into()
.map_err(|_| TimestampError::InvalidEncoding)?;
Ok(bytes.into())
}
None => Err(TimestampError::InvalidEncoding),
}
}
}
impl TryFrom<&[u8]> for Timestamp {
type Error = TimestampError;
fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
let bytes: [u8; 8] = bytes
.try_into()
.map_err(|_| TimestampError::InvalidBytesLength(bytes.len()))?;
Ok(bytes.into())
}
}
impl From<&Timestamp> for [u8; 8] {
fn from(timestamp: &Timestamp) -> Self {
timestamp.0.to_be_bytes()
}
}
impl From<[u8; 8]> for Timestamp {
fn from(bytes: [u8; 8]) -> Self {
Self(u64::from_be_bytes(bytes))
}
}
// === U64 conversion ===
impl From<Timestamp> for u64 {
fn from(value: Timestamp) -> Self {
value.into_inner()
}
}
impl Add<u64> for &Timestamp {
type Output = Timestamp;
fn add(self, rhs: u64) -> Self::Output {
Timestamp(self.0 + rhs)
}
}
impl Sub<u64> for &Timestamp {
type Output = Timestamp;
fn sub(self, rhs: u64) -> Self::Output {
Timestamp(self.0 - rhs)
}
}
impl Serialize for Timestamp {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let bytes = self.to_bytes();
bytes.serialize(serializer)
}
}
impl<'de> Deserialize<'de> for Timestamp {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let bytes: [u8; 8] = Deserialize::deserialize(deserializer)?;
Ok(Timestamp(u64::from_be_bytes(bytes)))
}
}
#[cfg(not(target_arch = "wasm32"))]
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
fn system_time() -> u64 {
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.expect("time drift")
.as_micros() as u64
}
#[cfg(target_arch = "wasm32")]
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
pub fn system_time() -> u64 {
// Won't be an issue for more than 5000 years!
(js_sys::Date::now() as u64)
// Turn milliseconds into microseconds
* 1000
}
#[derive(thiserror::Error, Debug)]
pub enum TimestampError {
#[error("Invalid bytes length, Timestamp should be encoded as 8 bytes, got {0}")]
InvalidBytesLength(usize),
#[error("Invalid timestamp encoding")]
InvalidEncoding,
}
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use super::*;
#[test]
fn absolutely_monotonic() {
const COUNT: usize = 100;
let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);
for _ in 0..COUNT {
let timestamp = Timestamp::now();
set.insert(timestamp.clone());
vec.push(timestamp);
}
let mut ordered = vec.clone();
ordered.sort();
assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}
#[test]
fn strings() {
const COUNT: usize = 100;
let mut set = HashSet::with_capacity(COUNT);
let mut vec = Vec::with_capacity(COUNT);
for _ in 0..COUNT {
let string = Timestamp::now().to_string();
set.insert(string.clone());
vec.push(string)
}
let mut ordered = vec.clone();
ordered.sort();
assert_eq!(set.len(), COUNT, "unique");
assert_eq!(ordered, vec, "ordered");
}
#[test]
fn to_from_string() {
let timestamp = Timestamp::now();
let string = timestamp.to_string();
let decoded: Timestamp = string.try_into().unwrap();
assert_eq!(decoded, timestamp)
}
#[test]
fn serde() {
let timestamp = Timestamp::now();
let serialized = postcard::to_allocvec(&timestamp).unwrap();
assert_eq!(serialized, timestamp.to_bytes());
let deserialized: Timestamp = postcard::from_bytes(&serialized).unwrap();
assert_eq!(deserialized, timestamp);
}
}
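A small usage sketch of the `Timestamp` API above (the `pubky_common::timestamp` path is assumed from its use elsewhere in this diff):

```rust
use pubky_common::timestamp::Timestamp;

fn main() {
    let a = Timestamp::now();
    let b = Timestamp::now();

    // Absolutely monotonic within this process: later calls compare greater.
    assert!(b > a);

    // Crockford base32 of the big-endian bytes stays lexicographically sortable
    // and round-trips back into a Timestamp.
    let id = a.to_string();
    let decoded: Timestamp = id.clone().try_into().expect("valid Crockford encoding");
    assert_eq!(decoded, a);

    println!("sortable id: {id}");
}
```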


@@ -4,30 +4,33 @@ version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.82"
axum = { version = "0.7.5", features = ["macros"] }
axum-extra = { version = "0.9.3", features = ["typed-header", "async-read-body"] }
anyhow = "1.0.94"
axum = { version = "0.7.9", features = ["macros"] }
axum-extra = { version = "0.9.6", features = ["typed-header", "async-read-body"] }
base32 = "0.5.1"
bytes = "^1.7.1"
clap = { version = "4.5.11", features = ["derive"] }
bytes = "^1.9.0"
clap = { version = "4.5.23", features = ["derive"] }
dirs-next = "2.0.0"
flume = "0.11.0"
futures-util = "0.3.30"
heed = "0.20.3"
flume = "0.11.1"
futures-util = "0.3.31"
heed = "0.21.0"
hex = "0.4.3"
httpdate = "1.0.3"
libc = "0.2.159"
postcard = { version = "1.0.8", features = ["alloc"] }
pkarr = { version = "2.2.1-alpha.2", features = ["serde", "async"] }
postcard = { version = "1.1.1", features = ["alloc"] }
pkarr = { git = "https://github.com/Pubky/pkarr", package = "pkarr", features = ["dht", "lmdb-cache", "tls"] }
pubky-common = { version = "0.1.0", path = "../pubky-common" }
serde = { version = "1.0.213", features = ["derive"] }
tokio = { version = "1.37.0", features = ["full"] }
serde = { version = "1.0.216", features = ["derive"] }
tokio = { version = "1.42.0", features = ["full"] }
toml = "0.8.19"
tower-cookies = "0.10.0"
tower-http = { version = "0.5.2", features = ["cors", "trace"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
url = "2.5.2"
tower-http = { version = "0.6.2", features = ["cors", "trace"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
url = "2.5.4"
axum-server = { version = "0.7.1", features = ["tls-rustls-no-provider"] }
tower = "0.5.1"
page_size = "0.6.0"
[dev-dependencies]
reqwest = "0.12.8"
pkarr-relay = { git = "https://github.com/Pubky/pkarr", package = "pkarr-relay" }
mainline = "5.0.0-rc.1"
http-relay = { version = "0.1.0", path = "../http-relay" }


@@ -21,5 +21,13 @@ cargo build --release
Run with an optional config file
```bash
../target/release/pubky_homeserver --config=./src/config.toml
../target/release/pubky-homeserver --config=./src/config.toml
```
## Testnet
To run a local homeserver for testing, with an internal Pkarr relay, a hardcoded well-known public key, and a connection only to a local Mainline testnet:
```bash
cargo run -- --testnet
```


@@ -1,10 +1,41 @@
# Use testnet network (local DHT) for testing.
testnet = false
# Secret key (in hex) to generate the Homeserver's Keypair
secret_key = "0000000000000000000000000000000000000000000000000000000000000000"
# Domain to be published in Pkarr records for this server to be accessible by.
domain = "localhost"
# Port for the Homeserver to listen on.
port = 6287
# secret_key = "0000000000000000000000000000000000000000000000000000000000000000"
[database]
# Storage directory. Defaults to <System's Data Directory>.
# storage = ""
#
# Storage path can be relative or absolute.
storage = "./storage/"
[io]
# The port number to run an HTTP (clear text) server on.
http_port = 6286
# The port number to run an HTTPs (Pkarr TLS) server on.
https_port = 6287
# The public IP of this server.
#
# This address will be mentioned in the Pkarr records of this
# Homeserver that is published on its public key (derived from `secret_key`)
public_ip = "127.0.0.1"
# If you are running this server behind a reverse proxy,
# you need to provide some extra configurations.
[io.reverse_proxy]
# The public port should be mapped to the `io::https_port`
# and you should set up TCP forwarding (don't terminate TLS on that port).
public_port = 6287
# If you want your server to be accessible from legacy browsers,
# you need to provide some extra configurations.
[io.legacy_browsers]
# An ICANN domain name is necessary to support legacy browsers
#
# Make sure to set up a domain name and point it to the IP
# address of the machine where you are running this server.
#
# This domain should point to the `<public_ip>:<http_port>`.
#
# Currently we don't support ICANN TLS, so you should be running
# a reverse proxy and managing certificates there for this endpoint.
domain = "example.com"


@@ -1,337 +0,0 @@
//! Configuration for the server
use anyhow::{anyhow, Context, Result};
use pkarr::Keypair;
use serde::{Deserialize, Serialize};
use std::{
fmt::Debug,
path::{Path, PathBuf},
time::Duration,
};
use tracing::info;
use pubky_common::timestamp::Timestamp;
// === Database ===
const DEFAULT_STORAGE_DIR: &str = "pubky";
pub const DEFAULT_MAP_SIZE: usize = 10995116277760; // 10TB (not = disk-space used)
// === Server ==
pub const DEFAULT_LIST_LIMIT: u16 = 100;
pub const DEFAULT_MAX_LIST_LIMIT: u16 = 1000;
#[derive(Serialize, Deserialize, Clone, PartialEq)]
struct ConfigToml {
testnet: Option<bool>,
port: Option<u16>,
bootstrap: Option<Vec<String>>,
domain: Option<String>,
storage: Option<PathBuf>,
secret_key: Option<String>,
dht_request_timeout: Option<Duration>,
default_list_limit: Option<u16>,
max_list_limit: Option<u16>,
db_map_size: Option<usize>,
}
/// Server configuration
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Config {
/// Whether or not this server is running in a testnet.
testnet: bool,
/// The configured port for this server.
port: u16,
/// Bootstrapping DHT nodes.
///
/// Helpful to run the server locally or in testnet.
bootstrap: Option<Vec<String>>,
/// A public domain for this server
/// necessary for web browsers running in https environment.
domain: Option<String>,
/// Path to the storage directory.
///
/// Defaults to a directory in the OS data directory
storage: PathBuf,
/// Server keypair.
///
/// Defaults to a random keypair.
keypair: Keypair,
dht_request_timeout: Option<Duration>,
/// The default limit of a list api if no `limit` query parameter is provided.
///
/// Defaults to `100`
default_list_limit: u16,
/// The maximum limit of a list api, even if a `limit` query parameter is provided.
///
/// Defaults to `1000`
max_list_limit: u16,
// === Database params ===
db_map_size: usize,
}
impl Config {
fn try_from_str(value: &str) -> Result<Self> {
let config_toml: ConfigToml = toml::from_str(value)?;
let keypair = if let Some(secret_key) = config_toml.secret_key {
let secret_key = deserialize_secret_key(secret_key)?;
Keypair::from_secret_key(&secret_key)
} else {
Keypair::random()
};
let storage = {
let dir = if let Some(storage) = config_toml.storage {
storage
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};
dir.join("homeserver")
};
let config = Config {
testnet: config_toml.testnet.unwrap_or(false),
port: config_toml.port.unwrap_or(0),
bootstrap: config_toml.bootstrap,
domain: config_toml.domain,
keypair,
storage,
dht_request_timeout: config_toml.dht_request_timeout,
default_list_limit: config_toml.default_list_limit.unwrap_or(DEFAULT_LIST_LIMIT),
max_list_limit: config_toml
.default_list_limit
.unwrap_or(DEFAULT_MAX_LIST_LIMIT),
db_map_size: config_toml.db_map_size.unwrap_or(DEFAULT_MAP_SIZE),
};
if config.testnet {
let testnet_config = Config::testnet();
return Ok(Config {
bootstrap: testnet_config.bootstrap,
port: testnet_config.port,
keypair: testnet_config.keypair,
..config
});
}
Ok(config)
}
/// Load the config from a file.
pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
let s = tokio::fs::read_to_string(path.as_ref())
.await
.with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;
Config::try_from_str(&s)
}
/// Testnet configurations
pub fn testnet() -> Self {
let testnet = pkarr::mainline::Testnet::new(10);
info!(?testnet.bootstrap, "Testnet bootstrap nodes");
Config {
port: 15411,
dht_request_timeout: None,
db_map_size: DEFAULT_MAP_SIZE,
keypair: Keypair::from_secret_key(&[0; 32]),
..Self::test(&testnet)
}
}
/// Test configurations
pub fn test(testnet: &pkarr::mainline::Testnet) -> Self {
let bootstrap = Some(testnet.bootstrap.to_owned());
let storage = std::env::temp_dir()
.join(Timestamp::now().to_string())
.join(DEFAULT_STORAGE_DIR);
Self {
testnet: true,
bootstrap,
storage,
db_map_size: 10485760,
..Default::default()
}
}
pub fn port(&self) -> u16 {
self.port
}
pub fn bootstsrap(&self) -> Option<Vec<String>> {
self.bootstrap.to_owned()
}
pub fn domain(&self) -> &Option<String> {
&self.domain
}
pub fn keypair(&self) -> &Keypair {
&self.keypair
}
pub fn default_list_limit(&self) -> u16 {
self.default_list_limit
}
pub fn max_list_limit(&self) -> u16 {
self.max_list_limit
}
/// Get the path to the storage directory
pub fn storage(&self) -> &PathBuf {
&self.storage
}
pub(crate) fn dht_request_timeout(&self) -> Option<Duration> {
self.dht_request_timeout
}
pub(crate) fn db_map_size(&self) -> usize {
self.db_map_size
}
}
impl Default for Config {
fn default() -> Self {
Self {
testnet: false,
port: 0,
bootstrap: None,
domain: None,
storage: storage(None)
.expect("operating environment provides no directory for application data"),
keypair: Keypair::random(),
dht_request_timeout: None,
default_list_limit: DEFAULT_LIST_LIMIT,
max_list_limit: DEFAULT_MAX_LIST_LIMIT,
db_map_size: DEFAULT_MAP_SIZE,
}
}
}
fn deserialize_secret_key(s: String) -> anyhow::Result<[u8; 32]> {
let bytes =
hex::decode(s).map_err(|_| anyhow!("secret_key in config.toml should hex encoded"))?;
if bytes.len() != 32 {
return Err(anyhow!(format!(
"secret_key in config.toml should be 32 bytes in hex (64 characters), got: {}",
bytes.len()
)));
}
let mut arr = [0u8; 32];
arr.copy_from_slice(&bytes);
Ok(arr)
}
fn storage(storage: Option<String>) -> Result<PathBuf> {
let dir = if let Some(storage) = storage {
PathBuf::from(storage)
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};
Ok(dir.join("homeserver"))
}
#[cfg(test)]
mod tests {
use pkarr::mainline::Testnet;
use super::*;
#[test]
fn parse_empty() {
let config = Config::try_from_str("").unwrap();
assert_eq!(
config,
Config {
keypair: config.keypair.clone(),
..Default::default()
}
)
}
#[test]
fn config_test() {
let testnet = Testnet::new(3);
let config = Config::test(&testnet);
assert_eq!(
config,
Config {
testnet: true,
bootstrap: testnet.bootstrap.into(),
db_map_size: 10485760,
storage: config.storage.clone(),
keypair: config.keypair.clone(),
..Default::default()
}
)
}
#[test]
fn config_testnet() {
let config = Config::testnet();
assert_eq!(
config,
Config {
testnet: true,
port: 15411,
bootstrap: config.bootstrap.clone(),
storage: config.storage.clone(),
keypair: config.keypair.clone(),
..Default::default()
}
)
}
#[test]
fn parse_with_testnet_flag() {
let config = Config::try_from_str(
r#"
# Secret key (in hex) to generate the Homeserver's Keypair
secret_key = "0123000000000000000000000000000000000000000000000000000000000000"
# Domain to be published in Pkarr records for this server to be accessible by.
domain = "localhost"
# Port for the Homeserver to listen on.
port = 6287
# Storage directory Defaults to <System's Data Directory>
storage = "/homeserver"
testnet = true
bootstrap = ["foo", "bar"]
# event stream
default_list_limit = 500
max_list_limit = 10000
"#,
)
.unwrap();
assert_eq!(config.keypair, Keypair::from_secret_key(&[0; 32]));
assert_eq!(config.port, 15411);
assert_ne!(
config.bootstrap,
Some(vec!["foo".to_string(), "bar".to_string()])
);
}
}


@@ -0,0 +1,380 @@
//! Configuration for the server
use anyhow::{anyhow, Context, Result};
use pkarr::Keypair;
use serde::{Deserialize, Serialize};
use std::{
fmt::Debug,
fs,
net::{IpAddr, SocketAddr},
path::{Path, PathBuf},
time::Duration,
};
const DEFAULT_HTTP_PORT: u16 = 6286;
const DEFAULT_HTTPS_PORT: u16 = 6287;
// === Database ===
const DEFAULT_STORAGE_DIR: &str = "pubky";
pub const DEFAULT_MAP_SIZE: usize = 10995116277760; // 10 TB map size (not the same as disk space used)
// === Server ==
pub const DEFAULT_LIST_LIMIT: u16 = 100;
pub const DEFAULT_MAX_LIST_LIMIT: u16 = 1000;
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
struct DatabaseToml {
storage: Option<PathBuf>,
}
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq, Eq)]
struct ReverseProxyToml {
pub public_port: Option<u16>,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
struct LegacyBrowsersTompl {
pub domain: Option<String>,
}
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)]
struct IoToml {
pub http_port: Option<u16>,
pub https_port: Option<u16>,
pub public_ip: Option<IpAddr>,
pub reverse_proxy: Option<ReverseProxyToml>,
pub legacy_browsers: Option<LegacyBrowsersTompl>,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
struct ConfigToml {
secret_key: Option<String>,
database: Option<DatabaseToml>,
io: Option<IoToml>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct IoConfig {
pub http_port: u16,
pub https_port: u16,
pub public_addr: Option<SocketAddr>,
pub domain: Option<String>,
}
/// Server configuration
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Config {
/// Run in [testnet](crate::Homeserver::start_testnet) mode.
pub testnet: bool,
/// Bootstrapping DHT nodes.
///
/// Helpful to run the server locally or in testnet.
pub bootstrap: Option<Vec<String>>,
/// Path to the storage directory.
///
/// Defaults to a directory in the OS data directory
pub storage: PathBuf,
/// Server keypair.
///
/// Defaults to a random keypair.
pub keypair: Keypair,
pub dht_request_timeout: Option<Duration>,
/// The default limit of a list api if no `limit` query parameter is provided.
///
/// Defaults to `100`
pub default_list_limit: u16,
/// The maximum limit of a list api, even if a `limit` query parameter is provided.
///
/// Defaults to `1000`
pub max_list_limit: u16,
// === Database params ===
pub db_map_size: usize,
pub io: IoConfig,
}
impl Config {
fn try_from_str(value: &str) -> Result<Self> {
let config_toml: ConfigToml = toml::from_str(value)?;
config_toml.try_into()
}
/// Load the config from a file.
pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
let config_file_path = path.as_ref();
let s = tokio::fs::read_to_string(config_file_path)
.await
.with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;
let mut config = Config::try_from_str(&s)?;
// support relative path.
if config.storage.is_relative() {
config.storage = config_file_path
.parent()
.unwrap_or_else(|| Path::new("."))
.join(config.storage.clone());
}
fs::create_dir_all(&config.storage)?;
config.storage = config.storage.canonicalize()?;
Ok(config)
}
/// Test configurations
pub fn test(testnet: &mainline::Testnet) -> Self {
let bootstrap = Some(testnet.bootstrap.to_owned());
let storage = std::env::temp_dir()
.join(pubky_common::timestamp::Timestamp::now().to_string())
.join(DEFAULT_STORAGE_DIR);
Self {
bootstrap,
storage,
db_map_size: 10485760,
io: IoConfig {
http_port: 0,
https_port: 0,
public_addr: None,
domain: None,
},
..Default::default()
}
}
}
impl Default for Config {
fn default() -> Self {
Self {
testnet: false,
keypair: Keypair::random(),
bootstrap: None,
storage: storage(None)
.expect("operating environment provides no directory for application data"),
dht_request_timeout: None,
default_list_limit: DEFAULT_LIST_LIMIT,
max_list_limit: DEFAULT_MAX_LIST_LIMIT,
db_map_size: DEFAULT_MAP_SIZE,
io: IoConfig {
https_port: DEFAULT_HTTPS_PORT,
http_port: DEFAULT_HTTP_PORT,
domain: None,
public_addr: None,
},
}
}
}
impl TryFrom<ConfigToml> for Config {
type Error = anyhow::Error;
fn try_from(value: ConfigToml) -> std::result::Result<Self, Self::Error> {
let keypair = if let Some(secret_key) = value.secret_key {
let secret_key = deserialize_secret_key(secret_key)?;
Keypair::from_secret_key(&secret_key)
} else {
Keypair::random()
};
let storage = {
let dir =
if let Some(storage) = value.database.as_ref().and_then(|db| db.storage.clone()) {
storage
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};
dir.join("homeserver")
};
let io = if let Some(io) = value.io {
IoConfig {
http_port: io.http_port.unwrap_or(DEFAULT_HTTP_PORT),
https_port: io.https_port.unwrap_or(DEFAULT_HTTPS_PORT),
domain: io.legacy_browsers.and_then(|l| l.domain),
public_addr: io.public_ip.map(|ip| {
SocketAddr::from((
ip,
io.reverse_proxy
.and_then(|r| r.public_port)
.unwrap_or(io.https_port.unwrap_or(0)),
))
}),
}
} else {
IoConfig {
http_port: DEFAULT_HTTP_PORT,
https_port: DEFAULT_HTTPS_PORT,
domain: None,
public_addr: None,
}
};
Ok(Config {
testnet: false,
keypair,
storage,
dht_request_timeout: None,
bootstrap: None,
default_list_limit: DEFAULT_LIST_LIMIT,
max_list_limit: DEFAULT_MAX_LIST_LIMIT,
db_map_size: DEFAULT_MAP_SIZE,
io,
})
}
}
fn deserialize_secret_key(s: String) -> anyhow::Result<[u8; 32]> {
let bytes =
hex::decode(s).map_err(|_| anyhow!("secret_key in config.toml should be hex encoded"))?;
if bytes.len() != 32 {
return Err(anyhow!(format!(
"secret_key in config.toml should be 32 bytes in hex (64 characters), got: {}",
bytes.len()
)));
}
let mut arr = [0u8; 32];
arr.copy_from_slice(&bytes);
Ok(arr)
}
fn storage(storage: Option<String>) -> Result<PathBuf> {
let dir = if let Some(storage) = storage {
PathBuf::from(storage)
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};
Ok(dir.join("homeserver"))
}
#[cfg(test)]
mod tests {
use mainline::Testnet;
use super::*;
#[test]
fn parse_empty() {
let config = Config::try_from_str("").unwrap();
assert_eq!(
config,
Config {
keypair: config.keypair.clone(),
..Default::default()
}
)
}
#[tokio::test]
async fn config_load() {
let crate_dir = std::env::current_dir().unwrap();
let config_file_path = crate_dir.join("./src/config.example.toml");
let canonical_file_path = config_file_path.canonicalize().unwrap();
let config = Config::load(canonical_file_path).await.unwrap();
assert!(config
.storage
.ends_with("pubky-homeserver/src/storage/homeserver"));
}
#[test]
fn config_test() {
let testnet = Testnet::new(3).unwrap();
let config = Config::test(&testnet);
assert_eq!(
config,
Config {
bootstrap: testnet.bootstrap.into(),
db_map_size: 10485760,
storage: config.storage.clone(),
keypair: config.keypair.clone(),
io: IoConfig {
http_port: 0,
https_port: 0,
public_addr: None,
domain: None
},
..Default::default()
}
)
}
#[test]
fn parse() {
let config = Config::try_from_str(
r#"
# Secret key (in hex) to generate the Homeserver's Keypair
secret_key = "0000000000000000000000000000000000000000000000000000000000000000"
[database]
# Storage directory. Defaults to <System's Data Directory>.
# storage = ""
[io]
# The port number to run an HTTP (clear text) server on.
http_port = 6286
# The port number to run an HTTPs (Pkarr TLS) server on.
https_port = 6287
# The public IP of this server.
#
# This address will be mentioned in the Pkarr records of this
# Homeserver that is published on its public key (derived from `secret_key`)
public_ip = "127.0.0.1"
# If you are running this server behind a reverse proxy,
# you need to provide some extra configurations.
[io.reverse_proxy]
# The public port should be mapped to the `io::https_port`
# and you should set up TCP forwarding (don't terminate TLS on that port).
public_port = 6287
# If you want your server to be accessible from legacy browsers,
# you need to provide some extra configurations.
[io.legacy_browsers]
# An ICANN domain name is necessary to support legacy browsers
#
# Make sure to set up a domain name and point it to the IP
# address of the machine where you are running this server.
#
# This domain should point to the `<public_ip>:<http_port>`.
#
# Currently we don't support ICANN TLS, so you should be running
# a reverse proxy and managing certificates there for this endpoint.
domain = "example.com"
"#,
)
.unwrap();
assert_eq!(config.keypair, Keypair::from_secret_key(&[0; 32]));
assert_eq!(config.io.https_port, 6287);
assert_eq!(
config.io.public_addr,
Some(SocketAddr::from(([127, 0, 0, 1], 6287)))
);
assert_eq!(config.io.domain, Some("example.com".to_string()));
}
}


@@ -1,6 +1,6 @@
use heed::{Env, RwTxn};
use crate::database::tables::{blobs, entries, events, sessions, users};
use crate::core::database::tables::{blobs, entries, events, sessions, users};
pub fn run(env: &Env, wtxn: &mut RwTxn) -> anyhow::Result<()> {
let _: users::UsersTable = env.create_database(wtxn, Some(users::USERS_TABLE))?;


@@ -0,0 +1,81 @@
//! Internal database in [crate::HomeserverCore]
use std::{fs, path::PathBuf};
use heed::{Env, EnvOpenOptions};
mod migrations;
pub mod tables;
use crate::core::config::Config;
use tables::{Tables, TABLES_COUNT};
pub use protected::DB;
/// Protecting fields from being mutated by modules in crate::core::database
mod protected {
use super::*;
#[derive(Debug, Clone)]
pub struct DB {
pub(crate) env: Env,
pub(crate) tables: Tables,
pub(crate) buffers_dir: PathBuf,
pub(crate) max_chunk_size: usize,
config: Config,
}
impl DB {
/// # Safety
/// DB uses LMDB; [opening][heed::EnvOpenOptions::open] it is marked unsafe
/// because of the possible Undefined Behavior (UB) if the lock file is broken.
pub unsafe fn open(config: Config) -> anyhow::Result<Self> {
let buffers_dir = config.storage.clone().join("buffers");
// Cleanup buffers.
let _ = fs::remove_dir(&buffers_dir);
fs::create_dir_all(&buffers_dir)?;
let env = unsafe {
EnvOpenOptions::new()
.max_dbs(TABLES_COUNT)
.map_size(config.db_map_size)
.open(&config.storage)
}?;
let tables = migrations::run(&env)?;
let db = DB {
env,
tables,
config,
buffers_dir,
max_chunk_size: max_chunk_size(),
};
Ok(db)
}
// === Getters ===
pub fn config(&self) -> &Config {
&self.config
}
}
}
/// calculate optimal chunk size:
/// - https://lmdb.readthedocs.io/en/release/#storage-efficiency-limits
/// - https://github.com/lmdbjava/benchmarks/blob/master/results/20160710/README.md#test-2-determine-24816-kb-byte-values
fn max_chunk_size() -> usize {
let page_size = page_size::get();
// - 16 bytes Header per page (LMDB)
// - Each page has to contain 2 records
// - 8 bytes per record (LMDB) (empirically, it seems to be 10 not 8)
// - 12 bytes key:
// - timestamp : 8 bytes
// - chunk index: 4 bytes
((page_size - 16) / 2) - (8 + 2) - 12
}
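As a worked example of the formula above (assuming a common 4 KiB page size, purely for illustration):

```rust
fn main() {
    // ((4096 - 16) / 2) - (8 + 2) - 12
    //   = 2040 - 10 - 12
    //   = 2018 bytes per chunk on a 4096-byte page.
    let page_size = 4096usize;
    let max_chunk_size = ((page_size - 16) / 2) - (8 + 2) - 12;
    assert_eq!(max_chunk_size, 2018);
}
```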


@@ -1,6 +1,6 @@
use heed::{types::Bytes, Database, RoTxn};
use crate::database::DB;
use crate::core::database::DB;
use super::entries::Entry;


@@ -18,7 +18,7 @@ use pubky_common::{
timestamp::Timestamp,
};
use crate::database::DB;
use crate::core::database::DB;
use super::events::Event;
@@ -28,6 +28,9 @@ pub type EntriesTable = Database<Str, Bytes>;
pub const ENTRIES_TABLE: &str = "entries";
impl DB {
/// Write an entry by an author at a given path.
///
/// The path has to start with a forward slash `/`
pub fn write_entry(
&mut self,
public_key: &PublicKey,
@@ -36,10 +39,13 @@ impl DB {
EntryWriter::new(self, public_key, path)
}
/// Delete an entry by an author at a given path.
///
/// The path has to start with a forward slash `/`
pub fn delete_entry(&mut self, public_key: &PublicKey, path: &str) -> anyhow::Result<bool> {
let mut wtxn = self.env.write_txn()?;
let key = format!("{public_key}/{path}");
let key = format!("{public_key}{path}");
let deleted = if let Some(bytes) = self.tables.entries.get(&wtxn, &key)? {
let entry = Entry::deserialize(bytes)?;
@@ -62,7 +68,7 @@ impl DB {
let deleted_entry = self.tables.entries.delete(&mut wtxn, &key)?;
// create DELETE event
if path.starts_with("pub/") {
if path.starts_with("/pub/") {
let url = format!("pubky://{key}");
let event = Event::delete(&url);
@@ -92,7 +98,7 @@ impl DB {
public_key: &PublicKey,
path: &str,
) -> anyhow::Result<Option<Entry>> {
let key = format!("{public_key}/{path}");
let key = format!("{public_key}{path}");
if let Some(bytes) = self.tables.entries.get(txn, &key)? {
return Ok(Some(Entry::deserialize(bytes)?));
@@ -107,7 +113,7 @@ impl DB {
/// Return a list of pubky urls.
///
/// - limit defaults to [Config::default_list_limit] and capped by [Config::max_list_limit]
/// - limit defaults to [crate::Config::default_list_limit] and capped by [crate::Config::max_list_limit]
pub fn list(
&self,
txn: &RoTxn,
@@ -121,8 +127,8 @@ impl DB {
let mut results = Vec::new();
let limit = limit
.unwrap_or(self.config.default_list_limit())
.min(self.config.max_list_limit());
.unwrap_or(self.config().default_list_limit)
.min(self.config().max_list_limit);
// TODO: make this more performant than split and allocations?
@@ -336,7 +342,7 @@ impl<'db> EntryWriter<'db> {
let buffer = File::create(&buffer_path)?;
let entry_key = format!("{public_key}/{path}");
let entry_key = format!("{public_key}{path}");
Ok(Self {
db,
@@ -345,7 +351,7 @@ impl<'db> EntryWriter<'db> {
buffer_path,
entry_key,
timestamp,
is_public: path.starts_with("pub/"),
is_public: path.starts_with("/pub/"),
})
}
@@ -445,15 +451,16 @@ impl<'db> std::io::Write for EntryWriter<'db> {
#[cfg(test)]
mod tests {
use bytes::Bytes;
use pkarr::{mainline::Testnet, Keypair};
use mainline::Testnet;
use pkarr::Keypair;
use crate::config::Config;
use crate::Config;
use super::DB;
#[tokio::test]
async fn entries() -> anyhow::Result<()> {
let mut db = DB::open(Config::test(&Testnet::new(0))).unwrap();
let mut db = unsafe { DB::open(Config::test(&Testnet::new(0).unwrap())).unwrap() };
let keypair = Keypair::random();
let public_key = keypair.public_key();
@@ -495,7 +502,7 @@ mod tests {
#[tokio::test]
async fn chunked_entry() -> anyhow::Result<()> {
let mut db = DB::open(Config::test(&Testnet::new(0))).unwrap();
let mut db = unsafe { DB::open(Config::test(&Testnet::new(0).unwrap())).unwrap() };
let keypair = Keypair::random();
let public_key = keypair.public_key();
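For clarity, the key layout implied by this change: the entry key is now the public key immediately followed by the path, so the path must begin with `/`. A tiny sketch (the public key string is a made-up placeholder):

```rust
fn main() {
    // Placeholder values, for illustration only.
    let public_key = "z32encodedpublickey";
    let path = "/pub/example.com/posts/1";

    // Matches the `format!("{public_key}{path}")` used above.
    assert_eq!(
        format!("{public_key}{path}"),
        "z32encodedpublickey/pub/example.com/posts/1"
    );
}
```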


@@ -10,7 +10,7 @@ use heed::{
use postcard::{from_bytes, to_allocvec};
use serde::{Deserialize, Serialize};
use crate::database::DB;
use crate::core::database::DB;
/// Event [Timestamp] base32 => Encoded event.
pub type EventsTable = Database<Str, Bytes>;
@@ -62,7 +62,7 @@ impl Event {
impl DB {
/// Returns a list of events formatted as `<OP> <url>`.
///
/// - limit defaults to [Config::default_list_limit] and capped by [Config::max_list_limit]
/// - limit defaults to [crate::Config::default_list_limit] and capped by [crate::Config::max_list_limit]
/// - cursor is a 13 character string encoding of a timestamp
pub fn list_events(
&self,
@@ -72,8 +72,8 @@ impl DB {
let txn = self.env.read_txn()?;
let limit = limit
.unwrap_or(self.config.default_list_limit())
.min(self.config.max_list_limit());
.unwrap_or(self.config().default_list_limit)
.min(self.config().max_list_limit);
let cursor = cursor.unwrap_or("0000000000000".to_string());


@@ -0,0 +1,42 @@
use heed::{
types::{Bytes, Str},
Database,
};
use pubky_common::session::Session;
use crate::core::database::DB;
/// session secret => Session.
pub type SessionsTable = Database<Str, Bytes>;
pub const SESSIONS_TABLE: &str = "sessions";
impl DB {
pub fn get_session(&self, session_secret: &str) -> anyhow::Result<Option<Session>> {
let rtxn = self.env.read_txn()?;
let session = self
.tables
.sessions
.get(&rtxn, session_secret)?
.map(|s| s.to_vec());
rtxn.commit()?;
if let Some(bytes) = session {
return Ok(Some(Session::deserialize(&bytes)?));
};
Ok(None)
}
pub fn delete_session(&mut self, secret: &str) -> anyhow::Result<bool> {
let mut wtxn = self.env.write_txn()?;
let deleted = self.tables.sessions.delete(&mut wtxn, secret)?;
wtxn.commit()?;
Ok(deleted)
}
}


@@ -78,8 +78,20 @@ impl From<pubky_common::auth::Error> for Error {
}
}
impl From<pkarr::Error> for Error {
fn from(error: pkarr::Error) -> Self {
impl From<pkarr::errors::SignedPacketVerifyError> for Error {
fn from(error: pkarr::errors::SignedPacketVerifyError) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<pkarr::errors::PublishError> for Error {
fn from(error: pkarr::errors::PublishError) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}
impl From<pkarr::errors::PublicKeyError> for Error {
fn from(error: pkarr::errors::PublicKeyError) -> Self {
Self::new(StatusCode::BAD_REQUEST, Some(error))
}
}


@@ -1,8 +1,8 @@
use std::collections::HashMap;
use std::{collections::HashMap, fmt::Display};
use axum::{
async_trait,
extract::{FromRequestParts, Path, Query},
extract::{FromRequestParts, Query},
http::{request::Parts, StatusCode},
response::{IntoResponse, Response},
RequestPartsExt,
@@ -10,68 +10,42 @@ use axum::{
use pkarr::PublicKey;
use crate::error::{Error, Result};
use crate::core::error::Result;
#[derive(Debug)]
pub struct Pubky(PublicKey);
#[derive(Debug, Clone)]
pub struct PubkyHost(pub(crate) PublicKey);
impl Pubky {
impl PubkyHost {
pub fn public_key(&self) -> &PublicKey {
&self.0
}
}
#[async_trait]
impl<S> FromRequestParts<S> for Pubky
where
S: Send + Sync,
{
type Rejection = Response;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
let params: Path<HashMap<String, String>> =
parts.extract().await.map_err(IntoResponse::into_response)?;
let pubky_id = params
.get("pubky")
.ok_or_else(|| (StatusCode::NOT_FOUND, "pubky param missing").into_response())?;
let public_key = PublicKey::try_from(pubky_id.to_string())
.map_err(Error::try_from)
.map_err(IntoResponse::into_response)?;
// TODO: return 404 if the user doesn't exist, but exclude signups.
Ok(Pubky(public_key))
}
}
pub struct EntryPath(pub(crate) String);
impl EntryPath {
pub fn as_str(&self) -> &str {
self.0.as_str()
impl Display for PubkyHost {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
#[async_trait]
impl<S> FromRequestParts<S> for EntryPath
impl<S> FromRequestParts<S> for PubkyHost
where
S: Send + Sync,
S: Sync + Send,
{
type Rejection = Response;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
let params: Path<HashMap<String, String>> =
parts.extract().await.map_err(IntoResponse::into_response)?;
let pubky_host = parts
.extensions
.get::<PubkyHost>()
.cloned()
.ok_or((
StatusCode::INTERNAL_SERVER_ERROR,
"Can't extract PubkyHost. Is `PubkyHostLayer` enabled?",
))
.map_err(|e| e.into_response())?;
// TODO: enforce path limits like no trailing '/'
let path = params
.get("path")
.ok_or_else(|| (StatusCode::NOT_FOUND, "entry path missing").into_response())?;
Ok(EntryPath(path.to_string()))
Ok(pubky_host)
}
}


@@ -0,0 +1,147 @@
use axum::http::Method;
use axum::response::IntoResponse;
use axum::{
body::Body,
http::{Request, StatusCode},
};
use futures_util::future::BoxFuture;
use pkarr::PublicKey;
use std::{convert::Infallible, task::Poll};
use tower::{Layer, Service};
use tower_cookies::Cookies;
use crate::core::{
error::{Error, Result},
extractors::PubkyHost,
AppState,
};
/// A Tower Layer to handle authorization for write operations.
#[derive(Debug, Clone)]
pub struct AuthorizationLayer {
state: AppState,
}
impl AuthorizationLayer {
pub fn new(state: AppState) -> Self {
Self { state }
}
}
impl<S> Layer<S> for AuthorizationLayer {
type Service = AuthorizationMiddleware<S>;
fn layer(&self, inner: S) -> Self::Service {
AuthorizationMiddleware {
inner,
state: self.state.clone(),
}
}
}
/// Middleware that performs authorization checks for write operations.
#[derive(Debug, Clone)]
pub struct AuthorizationMiddleware<S> {
inner: S,
state: AppState,
}
impl<S> Service<Request<Body>> for AuthorizationMiddleware<S>
where
S: Service<Request<Body>, Response = axum::response::Response, Error = Infallible>
+ Send
+ 'static
+ Clone,
S::Future: Send + 'static,
{
type Response = S::Response;
type Error = S::Error;
type Future = BoxFuture<'static, Result<Self::Response, Self::Error>>;
fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx).map_err(|_| unreachable!()) // `Infallible` conversion
}
fn call(&mut self, req: Request<Body>) -> Self::Future {
let state = self.state.clone();
let mut inner = self.inner.clone();
Box::pin(async move {
let path = req.uri().path();
let pubky = match req.extensions().get::<PubkyHost>() {
Some(pk) => pk,
None => {
return Ok(
Error::new(StatusCode::NOT_FOUND, "Pubky Host is missing".into())
.into_response(),
)
}
};
let cookies = req.extensions().get::<Cookies>();
// Authorize the request
if let Err(e) = authorize(&state, req.method(), cookies, pubky.public_key(), path) {
return Ok(e.into_response());
}
// If authorized, proceed to the inner service
inner.call(req).await.map_err(|_| unreachable!())
})
}
}
/// Authorize write (PUT or DELETE) for Public paths.
fn authorize(
state: &AppState,
method: &Method,
cookies: Option<&Cookies>,
public_key: &PublicKey,
path: &str,
) -> Result<()> {
if path == "/session" {
// Checking (or deleting) one's session is ok for everyone
return Ok(());
} else if path.starts_with("/pub/") {
if method == Method::GET {
return Ok(());
}
} else {
return Err(Error::new(
StatusCode::FORBIDDEN,
"Writing to directories other than '/pub/' is forbidden".into(),
));
}
if let Some(cookies) = cookies {
let session_secret = session_secret_from_cookies(cookies, public_key)
.ok_or(Error::with_status(StatusCode::UNAUTHORIZED))?;
let session = state
.db
.get_session(&session_secret)?
.ok_or(Error::with_status(StatusCode::UNAUTHORIZED))?;
if session.pubky() == public_key
&& session.capabilities().iter().any(|cap| {
path.starts_with(&cap.scope)
&& cap
.actions
.contains(&pubky_common::capabilities::Action::Write)
})
{
return Ok(());
}
return Err(Error::with_status(StatusCode::FORBIDDEN));
}
Err(Error::with_status(StatusCode::UNAUTHORIZED))
}
pub fn session_secret_from_cookies(cookies: &Cookies, public_key: &PublicKey) -> Option<String> {
cookies
.get(&public_key.to_string())
.map(|c| c.value().to_string())
}


@@ -0,0 +1,3 @@
pub mod authz;
pub mod pubky_host;
pub mod trace;


@@ -0,0 +1,64 @@
use pkarr::PublicKey;
use crate::core::extractors::PubkyHost;
use axum::{body::Body, http::Request};
use futures_util::future::BoxFuture;
use std::{convert::Infallible, task::Poll};
use tower::{Layer, Service};
use crate::core::error::Result;
/// A Tower Layer that resolves the Pubky host from the `host` or `pubky-host` request headers.
#[derive(Debug, Clone)]
pub struct PubkyHostLayer;
impl<S> Layer<S> for PubkyHostLayer {
type Service = PubkyHostLayerMiddleware<S>;
fn layer(&self, inner: S) -> Self::Service {
PubkyHostLayerMiddleware { inner }
}
}
/// Middleware that extracts the Pubky host and injects it as a [PubkyHost] request extension.
#[derive(Debug, Clone)]
pub struct PubkyHostLayerMiddleware<S> {
inner: S,
}
impl<S> Service<Request<Body>> for PubkyHostLayerMiddleware<S>
where
S: Service<Request<Body>, Response = axum::response::Response, Error = Infallible>
+ Send
+ 'static
+ Clone,
S::Future: Send + 'static,
{
type Response = S::Response;
type Error = Infallible;
type Future = BoxFuture<'static, Result<Self::Response, Self::Error>>;
fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx).map_err(|_| unreachable!()) // `Infallible` conversion
}
fn call(&mut self, req: Request<Body>) -> Self::Future {
let mut inner = self.inner.clone();
let mut req = req;
Box::pin(async move {
let headers_to_check = ["host", "pubky-host"];
for header in headers_to_check {
if let Some(Ok(pubky_host)) = req.headers().get(header).map(|h| h.to_str()) {
if let Ok(public_key) = PublicKey::try_from(pubky_host) {
req.extensions_mut().insert(PubkyHost(public_key));
}
}
}
inner.call(req).await.map_err(|_| unreachable!())
})
}
}


@@ -0,0 +1,75 @@
use std::sync::Arc;
use axum::{extract::Request, Router};
use tower_http::trace::{
DefaultOnFailure, DefaultOnRequest, DefaultOnResponse, OnFailure, OnRequest, OnResponse,
TraceLayer,
};
use tracing::{Level, Span};
use crate::core::extractors::PubkyHost;
pub fn with_trace_layer(router: Router, excluded_paths: &[&str]) -> Router {
let excluded_paths = Arc::new(
excluded_paths
.iter()
.map(|s| s.to_string())
.collect::<Vec<_>>(),
);
router.layer(
TraceLayer::new_for_http()
.make_span_with(move |request: &Request| {
if excluded_paths.contains(&request.uri().path().to_string()) {
// Skip logging for the noisy endpoint
tracing::span!(Level::INFO, "request", excluded = true)
} else {
// Use the default span for other endpoints
let uri = if let Some(pubky_host) = request.extensions().get::<PubkyHost>() {
format!("pubky://{pubky_host}{}", request.uri())
} else {
request.uri().to_string()
};
tracing::span!(
Level::INFO,
"request",
method = %request.method(),
uri = ?uri,
version = ?request.version(),
)
}
})
.on_request(|request: &Request, span: &Span| {
// Skip logging for excluded spans
if span.has_field("excluded") {
return;
}
// Use the default behavior for other spans
DefaultOnRequest::new().on_request(request, span);
})
.on_response(
|response: &axum::response::Response, latency: std::time::Duration, span: &Span| {
// Skip logging for excluded spans
if span.has_field("excluded") {
return;
}
// Use the default behavior for other spans
DefaultOnResponse::new().on_response(response, latency, span);
},
)
.on_failure(
|error: tower_http::classify::ServerErrorsFailureClass,
latency: std::time::Duration,
span: &Span| {
// Skip logging for excluded spans
if span.has_field("excluded") {
return;
}
// Use the default behavior for other spans
DefaultOnFailure::new().on_failure(error, latency, span);
},
),
)
}
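A hypothetical usage sketch of `with_trace_layer`: wrap an axum `Router` and exclude a noisy route from per-request logging. The `/healthz` route and excluded path are made up, and the import assumes this sketch lives inside the `core` module of this crate.

```rust
use axum::{routing::get, Router};

use crate::core::layers::trace::with_trace_layer;

fn router_with_tracing() -> Router {
    // Exclude a noisy endpoint from request logging while keeping the
    // default tracing behavior everywhere else.
    let router = Router::new().route("/healthz", get(|| async { "ok" }));
    with_trace_layer(router, &["/healthz"])
}
```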


@@ -0,0 +1,113 @@
use anyhow::Result;
use axum::{
body::Body,
extract::Request,
http::{header, Method},
response::Response,
Router,
};
use pkarr::{Keypair, PublicKey};
use pubky_common::{
auth::{AuthToken, AuthVerifier},
capabilities::Capability,
};
use tower::ServiceExt;
mod config;
mod database;
mod error;
mod extractors;
mod layers;
mod routes;
use database::DB;
pub use config::Config;
#[derive(Clone, Debug)]
pub(crate) struct AppState {
pub(crate) verifier: AuthVerifier,
pub(crate) db: DB,
}
#[derive(Debug, Clone)]
/// A side-effect-free Core of the [Homeserver].
pub struct HomeserverCore {
config: Config,
pub(crate) router: Router,
}
impl HomeserverCore {
/// Create a side-effect-free Homeserver core.
///
/// # Safety
/// HomeserverCore uses LMDB; [opening][heed::EnvOpenOptions::open] its environment is marked unsafe
/// because of possible Undefined Behavior (UB) if the lock file is broken.
pub unsafe fn new(config: &Config) -> Result<Self> {
let db = unsafe { DB::open(config.clone())? };
let state = AppState {
verifier: AuthVerifier::default(),
db,
};
let router = routes::create_app(state.clone());
Ok(Self {
router,
config: config.clone(),
})
}
#[cfg(test)]
/// Test version of [HomeserverCore::new], using a temporary storage.
pub fn test() -> Result<Self> {
let testnet = mainline::Testnet::new(0).expect("ignore");
unsafe { HomeserverCore::new(&Config::test(&testnet)) }
}
// === Getters ===
pub fn config(&self) -> &Config {
&self.config
}
pub fn keypair(&self) -> &Keypair {
&self.config.keypair
}
pub fn public_key(&self) -> PublicKey {
self.config.keypair.public_key()
}
// === Public Methods ===
pub async fn create_root_user(&mut self, keypair: &Keypair) -> Result<String> {
let auth_token = AuthToken::sign(keypair, vec![Capability::root()]);
let response = self
.call(
Request::builder()
.uri("/signup")
.header("host", keypair.public_key().to_string())
.method(Method::POST)
.body(Body::from(auth_token.serialize()))
.unwrap(),
)
.await?;
let header_value = response
.headers()
.get(header::SET_COOKIE)
.and_then(|h| h.to_str().ok())
.expect("should return a set-cookie header")
.to_string();
Ok(header_value)
}
pub async fn call(&self, request: Request) -> Result<Response> {
Ok(self.router.clone().oneshot(request).await?)
}
}
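For orientation, a hedged sketch of driving the core directly without binding any sockets; `Config::default()` is an assumption here (implied by the builder's `#[derive(Default)]` later in this diff), not something this changeset exercises:

use axum::{body::Body, extract::Request};

async fn probe_root() -> anyhow::Result<()> {
    // `unsafe` for the same LMDB lock-file caveat documented above.
    let core = unsafe { HomeserverCore::new(&Config::default())? };
    let response = core
        .call(Request::builder().uri("/").body(Body::empty())?)
        .await?;
    println!("root route returned {}", response.status());
    Ok(())
}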

View File

@@ -1,6 +1,5 @@
use axum::{
extract::{Host, State},
http::StatusCode,
response::IntoResponse,
};
use axum_extra::{headers::UserAgent, TypedHeader};
@@ -9,15 +8,7 @@ use tower_cookies::{cookie::SameSite, Cookie, Cookies};
use pubky_common::{crypto::random_bytes, session::Session, timestamp::Timestamp};
use crate::{
database::tables::{
sessions::{SessionsTable, SESSIONS_TABLE},
users::User,
},
error::{Error, Result},
extractors::Pubky,
server::AppState,
};
use crate::core::{database::tables::users::User, error::Result, AppState};
pub async fn signup(
State(state): State<AppState>,
@@ -31,58 +22,6 @@ pub async fn signup(
signin(State(state), user_agent, cookies, host, body).await
}
pub async fn session(
State(state): State<AppState>,
cookies: Cookies,
pubky: Pubky,
) -> Result<impl IntoResponse> {
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
let rtxn = state.db.env.read_txn()?;
let sessions: SessionsTable = state
.db
.env
.open_database(&rtxn, Some(SESSIONS_TABLE))?
.expect("Session table already created");
if let Some(session) = sessions.get(&rtxn, cookie.value())? {
let session = session.to_owned();
rtxn.commit()?;
// TODO: add content-type
return Ok(session);
};
rtxn.commit()?;
};
Err(Error::with_status(StatusCode::NOT_FOUND))
}
pub async fn signout(
State(state): State<AppState>,
cookies: Cookies,
pubky: Pubky,
) -> Result<impl IntoResponse> {
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
let mut wtxn = state.db.env.write_txn()?;
let sessions: SessionsTable = state
.db
.env
.open_database(&wtxn, Some(SESSIONS_TABLE))?
.expect("Session table already created");
let _ = sessions.delete(&mut wtxn, cookie.value());
wtxn.commit()?;
return Ok(());
};
Err(Error::with_status(StatusCode::UNAUTHORIZED))
}
pub async fn signin(
State(state): State<AppState>,
user_agent: Option<TypedHeader<UserAgent>>,
@@ -98,6 +37,7 @@ pub async fn signin(
let users = state.db.tables.users;
if let Some(existing) = users.get(&wtxn, public_key)? {
// TODO: why do we need this?
users.put(&mut wtxn, public_key, &existing)?;
} else {
users.put(
@@ -111,7 +51,12 @@ pub async fn signin(
let session_secret = base32::encode(base32::Alphabet::Crockford, &random_bytes::<16>());
let session = Session::new(&token, user_agent.map(|ua| ua.to_string())).serialize();
let session = Session::new(
token.pubky(),
token.capabilities(),
user_agent.map(|ua| ua.to_string()),
)
.serialize();
state
.db
@@ -119,10 +64,13 @@ pub async fn signin(
.sessions
.put(&mut wtxn, &session_secret, &session)?;
wtxn.commit()?;
let mut cookie = Cookie::new(public_key.to_string(), session_secret);
cookie.set_path("/");
// TODO: do we even have insecure anymore?
if is_secure(&host) {
cookie.set_secure(true);
cookie.set_same_site(SameSite::None);
@@ -131,8 +79,6 @@ pub async fn signin(
cookies.add(cookie);
wtxn.commit()?;
Ok(session)
}

View File

@@ -6,10 +6,10 @@ use axum::{
};
use pubky_common::timestamp::Timestamp;
use crate::{
use crate::core::{
error::{Error, Result},
extractors::ListQueryParams,
server::AppState,
AppState,
};
pub async fn feed(

View File

@@ -0,0 +1,41 @@
//! The controller part of the [crate::HomeserverCore]
use axum::{
routing::{get, post},
Router,
};
use tower_cookies::CookieManagerLayer;
use tower_http::cors::CorsLayer;
use crate::core::AppState;
use super::layers::{pubky_host::PubkyHostLayer, trace::with_trace_layer};
mod auth;
mod feed;
mod root;
mod tenants;
const TRACING_EXCLUDED_PATHS: [&str; 1] = ["/events/"];
fn base() -> Router<AppState> {
Router::new()
.route("/", get(root::handler))
.route("/signup", post(auth::signup))
.route("/session", post(auth::signin))
// Events
.route("/events/", get(feed::feed))
// TODO: add size limit
// TODO: revisit if we enable streaming big payloads
// TODO: maybe add to a separate router (drive router?).
}
pub fn create_app(state: AppState) -> Router {
let app = base()
.merge(tenants::router(state.clone()))
.layer(CookieManagerLayer::new())
.layer(CorsLayer::very_permissive())
.with_state(state);
with_trace_layer(app, &TRACING_EXCLUDED_PATHS).layer(PubkyHostLayer)
}

View File

@@ -0,0 +1,33 @@
//! Per Tenant (user / Pubky) routes.
//!
//! Every route here is relative to a tenant's Pubky host,
//! as opposed to routes relative to the Homeserver's owner.
use axum::{
extract::DefaultBodyLimit,
routing::{delete, get, head, put},
Router,
};
use crate::core::{layers::authz::AuthorizationLayer, AppState};
pub mod read;
pub mod session;
pub mod write;
pub fn router(state: AppState) -> Router<AppState> {
Router::new()
// - Datastore routes
.route("/pub/", get(read::get))
.route("/pub/*path", get(read::get))
.route("/pub/*path", head(read::head))
.route("/pub/*path", put(write::put))
.route("/pub/*path", delete(write::delete))
// - Session routes
.route("/session", get(session::session))
.route("/session", delete(session::signout))
// Layers
// TODO: different max size for sessions and other routes?
.layer(DefaultBodyLimit::max(100 * 1024 * 1024))
.layer(AuthorizationLayer::new(state.clone()))
}

View File

@@ -0,0 +1,313 @@
use axum::{
body::Body,
extract::{OriginalUri, State},
http::{header, HeaderMap, HeaderValue, Response, StatusCode},
response::IntoResponse,
};
use httpdate::HttpDate;
use pkarr::PublicKey;
use std::str::FromStr;
use crate::core::{
database::tables::entries::Entry,
error::{Error, Result},
extractors::{ListQueryParams, PubkyHost},
AppState,
};
pub async fn head(
State(state): State<AppState>,
pubky: PubkyHost,
headers: HeaderMap,
path: OriginalUri,
) -> Result<impl IntoResponse> {
let rtxn = state.db.env.read_txn()?;
get_entry(
headers,
state
.db
.get_entry(&rtxn, pubky.public_key(), path.0.path())?,
None,
)
}
pub async fn get(
State(state): State<AppState>,
headers: HeaderMap,
pubky: PubkyHost,
path: OriginalUri,
params: ListQueryParams,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key().clone();
let path = path.0.path().to_string();
if path.ends_with('/') {
return list(state, &public_key, &path, params);
}
let (entry_tx, entry_rx) = flume::bounded::<Option<Entry>>(1);
let (chunks_tx, chunks_rx) = flume::unbounded::<std::result::Result<Vec<u8>, heed::Error>>();
tokio::task::spawn_blocking(move || -> anyhow::Result<()> {
let rtxn = state.db.env.read_txn()?;
let option = state.db.get_entry(&rtxn, &public_key, &path)?;
if let Some(entry) = option {
let iter = entry.read_content(&state.db, &rtxn)?;
entry_tx.send(Some(entry))?;
for next in iter {
chunks_tx.send(next.map(|b| b.to_vec()))?;
}
};
entry_tx.send(None)?;
Ok(())
});
get_entry(
headers,
entry_rx.recv_async().await?,
Some(Body::from_stream(chunks_rx.into_stream())),
)
}
pub fn list(
state: AppState,
public_key: &PublicKey,
path: &str,
params: ListQueryParams,
) -> Result<Response<Body>> {
let txn = state.db.env.read_txn()?;
let path = format!("{public_key}{path}");
if !state.db.contains_directory(&txn, &path)? {
return Err(Error::new(
StatusCode::NOT_FOUND,
"Directory Not Found".into(),
));
}
// Handle listing
let vec = state.db.list(
&txn,
&path,
params.reverse,
params.limit,
params.cursor,
params.shallow,
)?;
Ok(Response::builder()
.status(StatusCode::OK)
.header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(vec.join("\n")))?)
}
pub fn get_entry(
headers: HeaderMap,
entry: Option<Entry>,
body: Option<Body>,
) -> Result<Response<Body>> {
if let Some(entry) = entry {
// TODO: Enable seek API (range requests)
// TODO: Gzip? or brotli?
let mut response = HeaderMap::from(&entry).into_response();
// Handle IF_MODIFIED_SINCE
if let Some(condition_http_date) = headers
.get(header::IF_MODIFIED_SINCE)
.and_then(|h| h.to_str().ok())
.and_then(|s| HttpDate::from_str(s).ok())
{
let entry_http_date: HttpDate = entry.timestamp().to_owned().into();
if condition_http_date >= entry_http_date {
*response.status_mut() = StatusCode::NOT_MODIFIED;
}
};
// Handle IF_NONE_MATCH
if let Some(str) = headers
.get(header::IF_NONE_MATCH)
.and_then(|h| h.to_str().ok())
{
let etag = format!("\"{}\"", entry.content_hash());
if str
.trim()
.split(',')
.collect::<Vec<_>>()
.contains(&etag.as_str())
{
*response.status_mut() = StatusCode::NOT_MODIFIED;
};
}
if let Some(body) = body {
*response.body_mut() = body;
};
Ok(response)
} else {
Err(Error::with_status(StatusCode::NOT_FOUND))?
}
}
impl From<&Entry> for HeaderMap {
fn from(entry: &Entry) -> Self {
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_LENGTH, entry.content_length().into());
headers.insert(
header::LAST_MODIFIED,
HeaderValue::from_str(&entry.timestamp().format_http_date())
.expect("http date is valid header value"),
);
headers.insert(
header::CONTENT_TYPE,
// TODO: when setting content type from user input, we should validate it as a HeaderValue
entry
.content_type()
.try_into()
.or(HeaderValue::from_str(""))
.expect("valid header value"),
);
headers.insert(
header::ETAG,
format!("\"{}\"", entry.content_hash())
.try_into()
.expect("hex string is valid"),
);
headers
}
}
#[cfg(test)]
mod tests {
use axum::{
body::Body,
http::{header, Method, Request, StatusCode},
};
use pkarr::Keypair;
use crate::core::HomeserverCore;
#[tokio::test]
async fn if_last_modified() {
let mut server = HomeserverCore::test().unwrap();
let keypair = Keypair::random();
let public_key = keypair.public_key();
let cookie = server.create_root_user(&keypair).await.unwrap().to_string();
let data = vec![1_u8, 2, 3, 4, 5];
let response = server
.call(
Request::builder()
.header("host", public_key.to_string())
.uri("/pub/foo")
.method(Method::PUT)
.header(header::COOKIE, cookie)
.body(Body::from(data))
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let response = server
.call(
Request::builder()
.header("host", public_key.to_string())
.uri("/pub/foo")
.method(Method::GET)
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
let response = server
.call(
Request::builder()
.header("host", public_key.to_string())
.uri("/pub/foo")
.method(Method::GET)
.header(
header::IF_MODIFIED_SINCE,
response.headers().get(header::LAST_MODIFIED).unwrap(),
)
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
}
#[tokio::test]
async fn if_none_match() {
let mut server = HomeserverCore::test().unwrap();
let keypair = Keypair::random();
let public_key = keypair.public_key();
let cookie = server.create_root_user(&keypair).await.unwrap().to_string();
let data = vec![1_u8, 2, 3, 4, 5];
let response = server
.call(
Request::builder()
.uri("/pub/foo")
.header("host", public_key.to_string())
.method(Method::PUT)
.header(header::COOKIE, cookie)
.body(Body::from(data))
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
let response = server
.call(
Request::builder()
.uri("/pub/foo")
.header("host", public_key.to_string())
.method(Method::GET)
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
let response = server
.call(
Request::builder()
.uri("/pub/foo")
.header("host", public_key.to_string())
.method(Method::GET)
.header(
header::IF_NONE_MATCH,
response.headers().get(header::ETAG).unwrap(),
)
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
}
}

View File

@@ -0,0 +1,38 @@
use axum::{extract::State, http::StatusCode, response::IntoResponse};
use tower_cookies::Cookies;
use crate::core::{
error::{Error, Result},
extractors::PubkyHost,
layers::authz::session_secret_from_cookies,
AppState,
};
pub async fn session(
State(state): State<AppState>,
cookies: Cookies,
pubky: PubkyHost,
) -> Result<impl IntoResponse> {
if let Some(secret) = session_secret_from_cookies(&cookies, pubky.public_key()) {
if let Some(session) = state.db.get_session(&secret)? {
// TODO: add content-type
return Ok(session.serialize());
};
}
Err(Error::with_status(StatusCode::NOT_FOUND))
}
pub async fn signout(
State(mut state): State<AppState>,
cookies: Cookies,
pubky: PubkyHost,
) -> Result<impl IntoResponse> {
// TODO: Set an expired cookie to delete the cookie on the client side.
if let Some(secret) = session_secret_from_cookies(&cookies, pubky.public_key()) {
state.db.delete_session(&secret)?;
}
// Idempotent Success Response (200 OK)
Ok(())
}

View File

@@ -0,0 +1,57 @@
use std::io::Write;
use futures_util::stream::StreamExt;
use axum::{
body::Body,
extract::{OriginalUri, State},
http::StatusCode,
response::IntoResponse,
};
use crate::core::{
error::{Error, Result},
extractors::PubkyHost,
AppState,
};
pub async fn delete(
State(mut state): State<AppState>,
pubky: PubkyHost,
path: OriginalUri,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key().clone();
// TODO: should we wrap this with `tokio::task::spawn_blocking` in case it takes too long?
let deleted = state.db.delete_entry(&public_key, path.0.path())?;
if !deleted {
// TODO: if the path ends with `/` return a `CONFLICT` error?
return Err(Error::with_status(StatusCode::NOT_FOUND));
};
Ok(())
}
pub async fn put(
State(mut state): State<AppState>,
pubky: PubkyHost,
path: OriginalUri,
body: Body,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key().clone();
let mut entry_writer = state.db.write_entry(&public_key, path.0.path())?;
let mut stream = body.into_data_stream();
while let Some(next) = stream.next().await {
let chunk = next?;
entry_writer.write_all(&chunk)?;
}
let _entry = entry_writer.commit()?;
// TODO: return relevant headers, like Etag?
Ok(())
}

View File

@@ -1,63 +0,0 @@
use std::{fs, path::PathBuf};
use heed::{Env, EnvOpenOptions};
mod migrations;
pub mod tables;
use crate::config::Config;
use tables::{Tables, TABLES_COUNT};
#[derive(Debug, Clone)]
pub struct DB {
pub(crate) env: Env,
pub(crate) tables: Tables,
pub(crate) config: Config,
pub(crate) buffers_dir: PathBuf,
pub(crate) max_chunk_size: usize,
}
impl DB {
pub fn open(config: Config) -> anyhow::Result<Self> {
let buffers_dir = config.storage().clone().join("buffers");
// Cleanup buffers.
let _ = fs::remove_dir(&buffers_dir);
fs::create_dir_all(&buffers_dir)?;
let env = unsafe {
EnvOpenOptions::new()
.max_dbs(TABLES_COUNT)
.map_size(config.db_map_size())
.open(config.storage())
}?;
let tables = migrations::run(&env)?;
let db = DB {
env,
tables,
config,
buffers_dir,
max_chunk_size: max_chunk_size(),
};
Ok(db)
}
}
/// calculate optimal chunk size:
/// - https://lmdb.readthedocs.io/en/release/#storage-efficiency-limits
/// - https://github.com/lmdbjava/benchmarks/blob/master/results/20160710/README.md#test-2-determine-24816-kb-byte-values
fn max_chunk_size() -> usize {
let page_size = unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize };
// - 16 bytes Header per page (LMDB)
// - Each page has to contain 2 records
// - 8 bytes per record (LMDB) (empirically, it seems to be 10 not 8)
// - 12 bytes key:
// - timestamp : 8 bytes
// - chunk index: 4 bytes
((page_size - 16) / 2) - (8 + 2) - 12
}
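Illustrative arithmetic only: for a common 4 KiB page the formula above works out to just over 2000 bytes per chunk.

// (4096 - 16) / 2 = 2040 usable bytes per half page,
// minus 10 bytes of record overhead and a 12-byte key => 2018 bytes.
assert_eq!(((4096 - 16) / 2) - (8 + 2) - 12, 2018);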

View File

@@ -1,51 +0,0 @@
use heed::{
types::{Bytes, Str},
Database,
};
use pkarr::PublicKey;
use pubky_common::session::Session;
use tower_cookies::Cookies;
use crate::database::DB;
/// session secret => Session.
pub type SessionsTable = Database<Str, Bytes>;
pub const SESSIONS_TABLE: &str = "sessions";
impl DB {
pub fn get_session(
&mut self,
cookies: Cookies,
public_key: &PublicKey,
) -> anyhow::Result<Option<Session>> {
if let Some(bytes) = self.get_session_bytes(cookies, public_key)? {
return Ok(Some(Session::deserialize(&bytes)?));
};
Ok(None)
}
pub fn get_session_bytes(
&mut self,
cookies: Cookies,
public_key: &PublicKey,
) -> anyhow::Result<Option<Vec<u8>>> {
if let Some(cookie) = cookies.get(&public_key.to_string()) {
let rtxn = self.env.read_txn()?;
let sessions: SessionsTable = self
.env
.open_database(&rtxn, Some(SESSIONS_TABLE))?
.expect("Session table already created");
let session = sessions.get(&rtxn, cookie.value())?.map(|s| s.to_vec());
rtxn.commit()?;
return Ok(session);
};
Ok(None)
}
}

View File

@@ -0,0 +1,91 @@
//! Http server around the HomeserverCore
use std::{
net::{SocketAddr, TcpListener},
sync::Arc,
};
use anyhow::Result;
use axum_server::{
tls_rustls::{RustlsAcceptor, RustlsConfig},
Handle,
};
use futures_util::TryFutureExt;
use crate::core::HomeserverCore;
#[derive(Debug)]
pub struct HttpServers {
/// Handle for the HTTP server
pub(crate) http_handle: Handle,
/// Handle for the HTTPS server using Pkarr TLS
pub(crate) https_handle: Handle,
http_address: SocketAddr,
https_address: SocketAddr,
}
impl HttpServers {
pub async fn start(core: &HomeserverCore) -> Result<Self> {
let http_listener =
TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], core.config().io.http_port)))?;
let http_address = http_listener.local_addr()?;
let http_handle = Handle::new();
tokio::spawn(
axum_server::from_tcp(http_listener)
.handle(http_handle.clone())
.serve(
core.router
.clone()
.into_make_service_with_connect_info::<SocketAddr>(),
)
.map_err(|error| tracing::error!(?error, "Homeserver http server error")),
);
let https_listener = TcpListener::bind(SocketAddr::from((
[0, 0, 0, 0],
core.config().io.https_port,
)))?;
let https_address = https_listener.local_addr()?;
let https_handle = Handle::new();
tokio::spawn(
axum_server::from_tcp(https_listener)
.acceptor(RustlsAcceptor::new(RustlsConfig::from_config(Arc::new(
core.keypair().to_rpk_rustls_server_config(),
))))
.handle(https_handle.clone())
.serve(
core.router
.clone()
.into_make_service_with_connect_info::<SocketAddr>(),
)
.map_err(|error| tracing::error!(?error, "Homeserver https server error")),
);
Ok(Self {
http_handle,
https_handle,
http_address,
https_address,
})
}
pub fn http_address(&self) -> SocketAddr {
self.http_address
}
pub fn https_address(&self) -> SocketAddr {
self.https_address
}
/// Shutdown all HTTP servers.
pub fn shutdown(&self) {
self.http_handle.shutdown();
self.https_handle.shutdown();
}
}

View File

@@ -0,0 +1,169 @@
use std::path::PathBuf;
use ::pkarr::{Keypair, PublicKey};
use anyhow::Result;
use http::HttpServers;
use pkarr::PkarrServer;
use tracing::info;
use crate::{Config, HomeserverCore};
mod http;
mod pkarr;
#[derive(Debug, Default)]
pub struct HomeserverBuilder(Config);
impl HomeserverBuilder {
pub fn testnet(mut self) -> Self {
self.0.testnet = true;
self
}
/// Configure the Homeserver's keypair
pub fn keypair(mut self, keypair: Keypair) -> Self {
self.0.keypair = keypair;
self
}
/// Configure the Mainline DHT bootstrap nodes. Useful for testnet configurations.
pub fn bootstrap(mut self, bootstrap: Vec<String>) -> Self {
self.0.bootstrap = Some(bootstrap);
self
}
/// Configure the storage path of the Homeserver
pub fn storage(mut self, storage: PathBuf) -> Self {
self.0.storage = storage;
self
}
/// Start running a Homeserver
///
/// # Safety
/// Homeserver uses LMDB; [opening][heed::EnvOpenOptions::open] its environment is marked unsafe
/// because of possible Undefined Behavior (UB) if the lock file is broken.
pub async unsafe fn start(self) -> Result<Homeserver> {
Homeserver::start(self.0).await
}
}
#[derive(Debug)]
/// Homeserver [Core][HomeserverCore] + I/O (http server and pkarr publishing).
pub struct Homeserver {
http_servers: HttpServers,
core: HomeserverCore,
}
impl Homeserver {
pub fn builder() -> HomeserverBuilder {
HomeserverBuilder::default()
}
/// Start running a Homeserver
///
/// # Safety
/// Homeserver uses LMDB; [opening][heed::EnvOpenOptions::open] its environment is marked unsafe
/// because of possible Undefined Behavior (UB) if the lock file is broken.
pub async unsafe fn start(config: Config) -> Result<Self> {
tracing::debug!(?config, "Starting homeserver with configurations");
let core = unsafe { HomeserverCore::new(&config)? };
let http_servers = HttpServers::start(&core).await?;
info!(
"Homeserver listening on http://localhost:{}",
http_servers.http_address().port()
);
info!("Publishing Pkarr packet..");
let pkarr_server = PkarrServer::new(
&config,
http_servers.https_address().port(),
http_servers.http_address().port(),
)?;
pkarr_server.publish_server_packet().await?;
info!("Homeserver listening on https://{}", core.public_key());
Ok(Self { http_servers, core })
}
/// Start a homeserver in a Testnet mode.
///
/// - Homeserver address is hardcoded to `8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo`
/// - Run a pkarr Relay on port [15411](pubky_common::constants::testnet_ports::PKARR_RELAY)
/// - Use temporary storage for both the homeserver and the relay
/// - Publish http port on a [reserved service parameter key](pubky_common::constants::reserved_param_keys::HTTP_PORT)
/// - Run an HTTP relay on port [15412](pubky_common::constants::testnet_ports::HTTP_RELAY)
///
/// # Safety
/// See [Self::start]
pub async unsafe fn start_testnet() -> Result<Self> {
let testnet = mainline::Testnet::new(10)?;
testnet.leak();
let storage =
std::env::temp_dir().join(pubky_common::timestamp::Timestamp::now().to_string());
let pkarr_relay = unsafe {
let mut config = pkarr_relay::Config {
http_port: pubky_common::constants::testnet_ports::PKARR_RELAY,
cache_path: Some(storage.join("pkarr-relay")),
rate_limiter: None,
..Default::default()
};
config.pkarr.bootstrap(&testnet.bootstrap);
pkarr_relay::Relay::run(config).await?
};
let http_relay = http_relay::HttpRelay::builder()
.http_port(pubky_common::constants::testnet_ports::HTTP_RELAY)
.build()
.await?;
tracing::info!(http_relay=?http_relay.local_link_url().as_str(), "Running http relay in Testnet mode");
tracing::info!(relay_address=?pkarr_relay.relay_address(), bootstrap=? testnet.bootstrap,"Running pkarr relay in Testnet mode");
unsafe {
Homeserver::builder()
.testnet()
.keypair(Keypair::from_secret_key(&[0; 32]))
.bootstrap(testnet.bootstrap)
.storage(storage.join("pubky-homeserver"))
.start()
.await
}
}
/// Unit-test version of [Homeserver::start], using a mainline Testnet and temporary storage.
pub async fn start_test(testnet: &mainline::Testnet) -> Result<Self> {
unsafe { Homeserver::start(Config::test(testnet)).await }
}
// === Getters ===
pub fn public_key(&self) -> PublicKey {
self.core.public_key()
}
/// Return the `https://<server public key>` url
pub fn url(&self) -> url::Url {
url::Url::parse(&format!("https://{}", self.public_key())).expect("valid url")
}
// === Public Methods ===
/// Send a shutdown signal to all open resources
pub fn shutdown(&self) {
self.http_servers.shutdown();
}
}
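A hedged sketch of embedding the homeserver via the builder above; the storage path and random keypair are placeholders, not values used by this changeset:

use pkarr::Keypair;

async fn run() -> anyhow::Result<()> {
    // `unsafe` for the same LMDB lock-file caveat documented on `start`.
    let homeserver = unsafe {
        Homeserver::builder()
            .keypair(Keypair::random())
            .storage(std::env::temp_dir().join("pubky-homeserver-example"))
            .start()
            .await?
    };
    println!("listening on {}", homeserver.url());
    homeserver.shutdown();
    Ok(())
}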

View File

@@ -0,0 +1,102 @@
//! Pkarr related task
use anyhow::Result;
use pkarr::{dns::rdata::SVCB, SignedPacket};
use crate::Config;
pub struct PkarrServer {
client: pkarr::Client,
signed_packet: SignedPacket,
}
impl PkarrServer {
pub fn new(config: &Config, https_port: u16, http_port: u16) -> Result<Self> {
let mut builder = pkarr::Client::builder();
// TODO: should we enable relays in homeservers for udp restricted environments?
builder.no_relays();
if let Some(bootstrap) = &config.bootstrap {
builder.bootstrap(bootstrap);
}
if let Some(request_timeout) = config.dht_request_timeout {
builder.request_timeout(request_timeout);
}
let client = builder.build()?;
let signed_packet = create_signed_packet(config, https_port, http_port)?;
Ok(Self {
client,
signed_packet,
})
}
pub async fn publish_server_packet(&self) -> anyhow::Result<()> {
// TODO: warn if packet is not most recent, which means the
// user is publishing a Packet from somewhere else.
self.client.publish(&self.signed_packet, None).await?;
Ok(())
}
}
pub fn create_signed_packet(
config: &Config,
https_port: u16,
http_port: u16,
) -> Result<SignedPacket> {
// TODO: Try to resolve first before publishing.
let mut signed_packet_builder = SignedPacket::builder();
let mut svcb = SVCB::new(0, ".".try_into()?);
// Set the public IP or fall back to localhost
signed_packet_builder = signed_packet_builder.address(
".".try_into().unwrap(),
config
.io
.public_addr
.map(|addr| addr.ip())
.unwrap_or("127.0.0.1".parse().expect("localhost is valid ip")),
60 * 60,
);
// Set the public port or the local https_port
svcb.set_port(
config
.io
.public_addr
.map(|addr| addr.port())
.unwrap_or(https_port),
);
signed_packet_builder = signed_packet_builder.https(".".try_into().unwrap(), svcb, 60 * 60);
// Set a low-priority HTTPS record for legacy browser support
if config.testnet {
let mut svcb = SVCB::new(10, ".".try_into()?);
let http_port_be_bytes = http_port.to_be_bytes();
svcb.set_param(
pubky_common::constants::reserved_param_keys::HTTP_PORT,
&http_port_be_bytes,
)?;
svcb.target = "localhost".try_into().expect("localhost is valid dns name");
signed_packet_builder = signed_packet_builder.https(".".try_into().unwrap(), svcb, 60 * 60)
} else if let Some(ref domain) = config.io.domain {
let mut svcb = SVCB::new(10, ".".try_into()?);
svcb.target = domain.as_str().try_into()?;
signed_packet_builder = signed_packet_builder.https(".".try_into().unwrap(), svcb, 60 * 60);
}
Ok(signed_packet_builder.build(&config.keypair)?)
}

View File

@@ -1,9 +1,6 @@
pub mod config;
mod database;
mod error;
mod extractors;
mod pkarr;
mod routes;
mod server;
mod core;
mod io;
pub use server::Homeserver;
pub use core::Config;
pub use core::HomeserverCore;
pub use io::Homeserver;

View File

@@ -1,7 +1,7 @@
use std::path::PathBuf;
use anyhow::Result;
use pubky_homeserver::{config::Config, Homeserver};
use pubky_homeserver::{Config, Homeserver};
use clap::Parser;
@@ -31,16 +31,21 @@ async fn main() -> Result<()> {
)
.init();
let server = Homeserver::start(if args.testnet {
Config::testnet()
} else if let Some(config_path) = args.config {
Config::load(config_path).await?
} else {
Config::default()
})
.await?;
let server = unsafe {
if args.testnet {
Homeserver::start_testnet().await?
} else if let Some(config_path) = args.config {
Homeserver::start(Config::load(config_path).await?).await?
} else {
Homeserver::builder().start().await?
}
};
server.run_until_done().await?;
tokio::signal::ctrl_c().await?;
tracing::info!("Shutting down Homeserver");
server.shutdown();
Ok(())
}

View File

@@ -1,46 +0,0 @@
//! Pkarr related task
use pkarr::{
dns::{rdata::SVCB, Packet},
Keypair, PkarrClientAsync, SignedPacket,
};
pub(crate) async fn publish_server_packet(
pkarr_client: &PkarrClientAsync,
keypair: &Keypair,
domain: &str,
port: u16,
) -> anyhow::Result<()> {
// TODO: Try to resolve first before publishing.
let mut packet = Packet::new_reply(0);
let mut svcb = SVCB::new(0, domain.try_into()?);
// Publishing port only for localhost domain,
// assuming any other domain will point to a reverse proxy
// at the conventional ports.
if domain == "localhost" {
svcb.priority = 1;
svcb.set_port(port);
// TODO: Add more parameters like the signer key!
// svcb.set_param(key, value)
};
// TODO: announce A/AAAA records as well for Noise connections?
// Or maybe Iroh's magic socket
packet.answers.push(pkarr::dns::ResourceRecord::new(
"@".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(keypair, &packet)?;
pkarr_client.publish(&signed_packet).await?;
Ok(())
}

View File

@@ -1,44 +0,0 @@
use axum::{
extract::DefaultBodyLimit,
routing::{delete, get, head, post, put},
Router,
};
use tower_cookies::CookieManagerLayer;
use tower_http::{cors::CorsLayer, trace::TraceLayer};
use crate::server::AppState;
use self::pkarr::pkarr_router;
mod auth;
mod feed;
mod pkarr;
mod public;
mod root;
fn base(state: AppState) -> Router {
Router::new()
.route("/", get(root::handler))
.route("/signup", post(auth::signup))
.route("/session", post(auth::signin))
.route("/:pubky/session", get(auth::session))
.route("/:pubky/session", delete(auth::signout))
.route("/:pubky/*path", put(public::put))
.route("/:pubky/*path", get(public::get))
.route("/:pubky/*path", head(public::head))
.route("/:pubky/*path", delete(public::delete))
.route("/events/", get(feed::feed))
.layer(CookieManagerLayer::new())
// TODO: revisit if we enable streaming big payloads
// TODO: maybe add to a separate router (drive router?).
.layer(DefaultBodyLimit::max(100 * 1024 * 1024))
.with_state(state)
}
pub fn create_app(state: AppState) -> Router {
base(state.clone())
// TODO: Only enable this for test environments?
.nest("/pkarr", pkarr_router(state))
.layer(CorsLayer::very_permissive())
.layer(TraceLayer::new_for_http())
}

View File

@@ -1,58 +0,0 @@
use axum::{
body::{Body, Bytes},
extract::State,
http::StatusCode,
response::IntoResponse,
routing::{get, put},
Router,
};
use futures_util::stream::StreamExt;
use pkarr::SignedPacket;
use crate::{
error::{Error, Result},
extractors::Pubky,
server::AppState,
};
/// Pkarr relay, helpful for testing.
///
/// For real production, you should use a [production-ready
/// relay](https://github.com/pubky/pkarr/server).
pub fn pkarr_router(state: AppState) -> Router {
Router::new()
.route("/:pubky", put(pkarr_put))
.route("/:pubky", get(pkarr_get))
.with_state(state)
}
pub async fn pkarr_put(
State(state): State<AppState>,
pubky: Pubky,
body: Body,
) -> Result<impl IntoResponse> {
let mut bytes = Vec::with_capacity(1104);
let mut stream = body.into_data_stream();
while let Some(chunk) = stream.next().await {
bytes.extend_from_slice(&chunk?)
}
let public_key = pubky.public_key().to_owned();
let signed_packet = SignedPacket::from_relay_payload(&public_key, &Bytes::from(bytes))?;
state.pkarr_client.publish(&signed_packet).await?;
Ok(())
}
pub async fn pkarr_get(State(state): State<AppState>, pubky: Pubky) -> Result<impl IntoResponse> {
if let Some(signed_packet) = state.pkarr_client.resolve(pubky.public_key()).await? {
return Ok(signed_packet.to_relay_payload());
}
Err(Error::with_status(StatusCode::NOT_FOUND))
}

View File

@@ -1,380 +0,0 @@
use axum::{
body::Body,
debug_handler,
extract::State,
http::{header, HeaderMap, HeaderValue, Response, StatusCode},
response::IntoResponse,
};
use futures_util::stream::StreamExt;
use httpdate::HttpDate;
use pkarr::PublicKey;
use std::{io::Write, str::FromStr};
use tower_cookies::Cookies;
use crate::{
database::tables::entries::Entry,
error::{Error, Result},
extractors::{EntryPath, ListQueryParams, Pubky},
server::AppState,
};
pub async fn put(
State(mut state): State<AppState>,
pubky: Pubky,
path: EntryPath,
cookies: Cookies,
body: Body,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key().clone();
let path = path.as_str().to_string();
verify(&path)?;
authorize(&mut state, cookies, &public_key, &path)?;
let mut entry_writer = state.db.write_entry(&public_key, &path)?;
let mut stream = body.into_data_stream();
while let Some(next) = stream.next().await {
let chunk = next?;
entry_writer.write_all(&chunk)?;
}
let _entry = entry_writer.commit()?;
// TODO: return relevant headers, like Etag?
Ok(())
}
#[debug_handler]
pub async fn get(
State(state): State<AppState>,
headers: HeaderMap,
pubky: Pubky,
path: EntryPath,
params: ListQueryParams,
) -> Result<impl IntoResponse> {
verify(path.as_str())?;
let public_key = pubky.public_key().clone();
let path = path.as_str().to_string();
if path.ends_with('/') {
let txn = state.db.env.read_txn()?;
let path = format!("{public_key}/{path}");
if !state.db.contains_directory(&txn, &path)? {
return Err(Error::new(
StatusCode::NOT_FOUND,
"Directory Not Found".into(),
));
}
// Handle listing
let vec = state.db.list(
&txn,
&path,
params.reverse,
params.limit,
params.cursor,
params.shallow,
)?;
return Ok(Response::builder()
.status(StatusCode::OK)
.header(header::CONTENT_TYPE, "text/plain")
.body(Body::from(vec.join("\n")))?);
}
let (entry_tx, entry_rx) = flume::bounded::<Option<Entry>>(1);
let (chunks_tx, chunks_rx) = flume::unbounded::<std::result::Result<Vec<u8>, heed::Error>>();
tokio::task::spawn_blocking(move || -> anyhow::Result<()> {
let rtxn = state.db.env.read_txn()?;
let option = state.db.get_entry(&rtxn, &public_key, &path)?;
if let Some(entry) = option {
let iter = entry.read_content(&state.db, &rtxn)?;
entry_tx.send(Some(entry))?;
for next in iter {
chunks_tx.send(next.map(|b| b.to_vec()))?;
}
};
entry_tx.send(None)?;
Ok(())
});
get_entry(
headers,
entry_rx.recv_async().await?,
Some(Body::from_stream(chunks_rx.into_stream())),
)
}
pub async fn head(
State(state): State<AppState>,
headers: HeaderMap,
pubky: Pubky,
path: EntryPath,
) -> Result<impl IntoResponse> {
verify(path.as_str())?;
let rtxn = state.db.env.read_txn()?;
get_entry(
headers,
state
.db
.get_entry(&rtxn, pubky.public_key(), path.as_str())?,
None,
)
}
pub fn get_entry(
headers: HeaderMap,
entry: Option<Entry>,
body: Option<Body>,
) -> Result<Response<Body>> {
if let Some(entry) = entry {
// TODO: Enable seek API (range requests)
// TODO: Gzip? or brotli?
let mut response = HeaderMap::from(&entry).into_response();
// Handle IF_MODIFIED_SINCE
if let Some(condition_http_date) = headers
.get(header::IF_MODIFIED_SINCE)
.and_then(|h| h.to_str().ok())
.and_then(|s| HttpDate::from_str(s).ok())
{
let entry_http_date: HttpDate = entry.timestamp().to_owned().into();
if condition_http_date >= entry_http_date {
*response.status_mut() = StatusCode::NOT_MODIFIED;
}
};
// Handle IF_NONE_MATCH
if let Some(str) = headers
.get(header::IF_NONE_MATCH)
.and_then(|h| h.to_str().ok())
{
let etag = format!("\"{}\"", entry.content_hash());
if str
.trim()
.split(',')
.collect::<Vec<_>>()
.contains(&etag.as_str())
{
*response.status_mut() = StatusCode::NOT_MODIFIED;
};
}
if let Some(body) = body {
*response.body_mut() = body;
};
Ok(response)
} else {
Err(Error::with_status(StatusCode::NOT_FOUND))?
}
}
pub async fn delete(
State(mut state): State<AppState>,
pubky: Pubky,
path: EntryPath,
cookies: Cookies,
) -> Result<impl IntoResponse> {
let public_key = pubky.public_key().clone();
let path = path.as_str();
authorize(&mut state, cookies, &public_key, path)?;
verify(path)?;
// TODO: should we wrap this with `tokio::task::spawn_blocking` in case it takes too long?
let deleted = state.db.delete_entry(&public_key, path)?;
if !deleted {
// TODO: if the path ends with `/` return a `CONFLICT` error?
return Err(Error::with_status(StatusCode::NOT_FOUND));
};
Ok(())
}
/// Authorize write (PUT or DELETE) for Public paths.
fn authorize(
state: &mut AppState,
cookies: Cookies,
public_key: &PublicKey,
path: &str,
) -> Result<()> {
// TODO: can we move this logic to the extractor or a layer
// to perform this validation?
let session = state
.db
.get_session(cookies, public_key)?
.ok_or(Error::with_status(StatusCode::UNAUTHORIZED))?;
if session.pubky() == public_key
&& session.capabilities().iter().any(|cap| {
path.starts_with(&cap.scope[1..])
&& cap
.actions
.contains(&pubky_common::capabilities::Action::Write)
})
{
return Ok(());
}
Err(Error::with_status(StatusCode::FORBIDDEN))
}
fn verify(path: &str) -> Result<()> {
if !path.starts_with("pub/") {
return Err(Error::new(
StatusCode::FORBIDDEN,
"Writing to directories other than '/pub/' is forbidden".into(),
));
}
// TODO: should we forbid paths ending with `/`?
Ok(())
}
impl From<&Entry> for HeaderMap {
fn from(entry: &Entry) -> Self {
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_LENGTH, entry.content_length().into());
headers.insert(
header::LAST_MODIFIED,
HeaderValue::from_str(&entry.timestamp().format_http_date())
.expect("http date is valid header value"),
);
headers.insert(
header::CONTENT_TYPE,
// TODO: when setting content type from user input, we should validate it as a HeaderValue
entry
.content_type()
.try_into()
.or(HeaderValue::from_str(""))
.expect("valid header value"),
);
headers.insert(
header::ETAG,
format!("\"{}\"", entry.content_hash())
.try_into()
.expect("hex string is valid"),
);
headers
}
}
#[cfg(test)]
mod tests {
use axum::http::header;
use pkarr::{mainline::Testnet, Keypair};
use reqwest::{self, Method, StatusCode};
use crate::Homeserver;
#[tokio::test]
async fn if_last_modified() -> anyhow::Result<()> {
let testnet = Testnet::new(3);
let mut server = Homeserver::start_test(&testnet).await?;
let public_key = Keypair::random().public_key();
let data = &[1, 2, 3, 4, 5];
server
.database_mut()
.write_entry(&public_key, "pub/foo")?
.update(data)?
.commit()?;
let client = reqwest::Client::builder().build()?;
let url = format!("http://localhost:{}/{public_key}/pub/foo", server.port());
let response = client.request(Method::GET, &url).send().await?;
let response = client
.request(Method::GET, &url)
.header(
header::IF_MODIFIED_SINCE,
response.headers().get(header::LAST_MODIFIED).unwrap(),
)
.send()
.await?;
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
let response = client
.request(Method::HEAD, &url)
.header(
header::IF_MODIFIED_SINCE,
response.headers().get(header::LAST_MODIFIED).unwrap(),
)
.send()
.await?;
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
Ok(())
}
#[tokio::test]
async fn if_none_match() -> anyhow::Result<()> {
let testnet = Testnet::new(3);
let mut server = Homeserver::start_test(&testnet).await?;
let public_key = Keypair::random().public_key();
let data = &[1, 2, 3, 4, 5];
server
.database_mut()
.write_entry(&public_key, "pub/foo")?
.update(data)?
.commit()?;
let client = reqwest::Client::builder().build()?;
let url = format!("http://localhost:{}/{public_key}/pub/foo", server.port());
let response = client.request(Method::GET, &url).send().await?;
let response = client
.request(Method::GET, &url)
.header(
header::IF_NONE_MATCH,
response.headers().get(header::ETAG).unwrap(),
)
.send()
.await?;
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
let response = client
.request(Method::HEAD, &url)
.header(
header::IF_NONE_MATCH,
response.headers().get(header::ETAG).unwrap(),
)
.send()
.await?;
assert_eq!(response.status(), StatusCode::NOT_MODIFIED);
Ok(())
}
}

View File

@@ -1,174 +0,0 @@
use std::{future::IntoFuture, net::SocketAddr};
use anyhow::{Error, Result};
use pubky_common::auth::AuthVerifier;
use tokio::{net::TcpListener, signal, task::JoinSet};
use tracing::{debug, info, warn};
use pkarr::{
mainline::dht::{DhtSettings, Testnet},
PkarrClient, PkarrClientAsync, PublicKey, Settings,
};
use crate::{config::Config, database::DB, pkarr::publish_server_packet};
#[derive(Debug)]
pub struct Homeserver {
state: AppState,
tasks: JoinSet<std::io::Result<()>>,
}
#[derive(Clone, Debug)]
pub(crate) struct AppState {
pub(crate) verifier: AuthVerifier,
pub(crate) db: DB,
pub(crate) pkarr_client: PkarrClientAsync,
pub(crate) config: Config,
pub(crate) port: u16,
}
impl Homeserver {
pub async fn start(config: Config) -> Result<Self> {
debug!(?config);
let db = DB::open(config.clone())?;
let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
bootstrap: config.bootstsrap(),
request_timeout: config.dht_request_timeout(),
..Default::default()
},
..Default::default()
})?
.as_async();
let mut tasks = JoinSet::new();
let listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], config.port()))).await?;
let port = listener.local_addr()?.port();
let state = AppState {
verifier: AuthVerifier::default(),
db,
pkarr_client,
config: config.clone(),
port,
};
let app = crate::routes::create_app(state.clone());
// Spawn http server task
tasks.spawn(
axum::serve(
listener,
app.into_make_service_with_connect_info::<SocketAddr>(),
)
.with_graceful_shutdown(shutdown_signal())
.into_future(),
);
info!("Homeserver listening on http://localhost:{port}");
publish_server_packet(
&state.pkarr_client,
config.keypair(),
&state
.config
.domain()
.clone()
.unwrap_or("localhost".to_string()),
port,
)
.await?;
info!(
"Homeserver listening on pubky://{}",
config.keypair().public_key()
);
Ok(Self { tasks, state })
}
/// Test version of [Homeserver::start], using mainline Testnet, and a temporary storage.
pub async fn start_test(testnet: &Testnet) -> Result<Self> {
info!("Running testnet..");
Homeserver::start(Config::test(testnet)).await
}
// === Getters ===
pub fn port(&self) -> u16 {
self.state.port
}
pub fn public_key(&self) -> PublicKey {
self.state.config.keypair().public_key()
}
#[cfg(test)]
pub(crate) fn database_mut(&mut self) -> &mut DB {
&mut self.state.db
}
// === Public Methods ===
/// Shutdown the server and wait for all tasks to complete.
pub async fn shutdown(mut self) -> Result<()> {
self.tasks.abort_all();
self.run_until_done().await?;
Ok(())
}
/// Wait for all tasks to complete.
///
/// Runs forever unless tasks fail.
pub async fn run_until_done(mut self) -> Result<()> {
let mut final_res: Result<()> = Ok(());
while let Some(res) = self.tasks.join_next().await {
match res {
Ok(Ok(())) => {}
Err(err) if err.is_cancelled() => {}
Ok(Err(err)) => {
warn!(?err, "task failed");
final_res = Err(Error::from(err));
}
Err(err) => {
warn!(?err, "task panicked");
final_res = Err(err.into());
}
}
}
final_res
}
}
async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
fn graceful_shutdown() {
info!("Gracefully Shutting down..");
}
tokio::select! {
_ = ctrl_c => graceful_shutdown(),
_ = terminate => graceful_shutdown(),
}
}

View File

@@ -11,34 +11,51 @@ keywords = ["web", "dht", "dns", "decentralized", "identity"]
crate-type = ["cdylib", "rlib"]
[dependencies]
thiserror = "1.0.62"
wasm-bindgen = "0.2.92"
url = "2.5.2"
bytes = "^1.7.1"
thiserror = "2.0.6"
wasm-bindgen = "0.2.99"
url = "2.5.4"
bytes = "^1.9.0"
base64 = "0.22.1"
pkarr = { version = "2.2.1-alpha.2", features = ["serde", "async"] }
pkarr = { git = "https://github.com/Pubky/pkarr", package = "pkarr", features = ["full"] }
mainline = "5.0.0-rc.1"
pubky-common = { version = "0.1.0", path = "../pubky-common" }
cookie = "0.18.1"
tracing = "0.1.41"
cookie_store = { version = "0.21.1", default-features = false }
anyhow = "1.0.94"
# Native dependencies
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
reqwest = { version = "0.12.5", features = ["cookies", "rustls-tls"], default-features = false }
tokio = { version = "1.37.0", features = ["full"] }
reqwest = { version = "0.12.9", features = ["cookies", "rustls-tls"], default-features = false }
tokio = { version = "1.42.0", features = ["full"] }
# Wasm dependencies
[target.'cfg(target_arch = "wasm32")'.dependencies]
reqwest = { version = "0.12.5", default-features = false }
reqwest = { version = "0.12.9", default-features = false }
futures-lite = { version = "2.5.0", default-features = false }
wasm-bindgen = "0.2.99"
wasm-bindgen-futures = "0.4.49"
console_log = { version = "1.0.0", features = ["color"] }
log = "0.4.22"
js-sys = "0.3.69"
wasm-bindgen = "0.2.92"
wasm-bindgen-futures = "0.4.42"
js-sys = "0.3.76"
web-sys = "0.3.76"
[dev-dependencies]
anyhow = "1.0.94"
axum = "0.7.9"
axum-server = "0.7.1"
futures-util = "0.3.31"
http-relay = { path = "../http-relay" }
pubky-homeserver = { path = "../pubky-homeserver" }
tokio = "1.37.0"
[features]
tokio = "1.42.0"
[package.metadata.docs.rs]
all-features = true
[package.metadata.wasm-pack.profile.release]
wasm-opt = ['-g', '-O']
# [lints.clippy]
unwrap_used = "deny"

View File

@@ -8,7 +8,7 @@ Rust implementation of [Pubky](https://github.com/pubky/pubky-cor
use pkarr::mainline::Testnet;
use pkarr::Keypair;
use pubky_homeserver::Homeserver;
use pubky::PubkyClient;
use pubky::Client;
#[tokio::main]
async fn main () {
@@ -16,10 +16,10 @@ async fn main () {
let testnet = Testnet::new(10);
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
// Uncomment the following line instead if you are not just testing.
// let client PubkyClient::builder().build();
// let client = Client::new().unwrap();
// Generate a keypair
let keypair = Keypair::random();

1
pubky/clippy.toml Normal file
View File

@@ -0,0 +1 @@
allow-unwrap-in-tests = true

View File

@@ -1,6 +1,8 @@
index.cjs
browser.js
coverage
node_modules
package-lock.json
pubky*
pubky.d.ts
pubky_bg.wasm
nodejs/
index.js
index.cjs

View File

@@ -21,10 +21,10 @@ For Nodejs, you need Node v20 or later.
## Getting started
```js
import { PubkyClient, Keypair, PublicKey } from '../index.js'
import { Client, Keypair, PublicKey } from '../index.js'
// Initialize PubkyClient with Pkarr relay(s).
let client = new PubkyClient();
// Initialize Client with Pkarr relay(s).
let client = new Client();
// Generate a keypair
let keypair = Keypair.random();
@@ -42,31 +42,40 @@ let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
// Verify that you are signed in.
const session = await client.session(publicKey)
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
// PUT public data, by authorized client
await client.put(url, body);
await client.fetch(url, {
method: "PUT",
body: JSON.stringify({foo: "bar"}),
credentials: "include"
});
// GET public data without signup or signin
{
const client = new PubkyClient();
const client = new Client();
let response = await client.get(url);
let response = await client.fetch(url);
}
// Delete public data, by authorized client
await client.delete(url);
await client.fetch(url, { method: "DELETE", credentials: "include "});
```
## API
### PubkyClient
### Client
#### constructor
```js
let client = new PubkyClient()
let client = new Client()
```
#### fetch
```js
let response = await client.fetch(url, opts);
```
Just like the normal Fetch API, but it can also handle `pubky://` URLs and `http(s)://` URLs with Pkarr domains.
#### signup
```js
await client.signup(keypair, homeserver)
@@ -127,27 +136,6 @@ let session = await client.session(publicKey)
- publicKey: An instance of [PublicKey](#publickey).
- Returns: A [Session](#session) object if signed in, or undefined if not.
#### put
```js
let response = await client.put(url, body);
```
- url: A string representing the Pubky URL.
- body: A Buffer containing the data to be stored.
### get
```js
let response = await client.get(url)
```
- url: A string representing the Pubky URL.
- Returns: A Uint8Array object containing the requested data, or `undefined` if `NOT_FOUND`.
### delete
```js
let response = await client.delete(url);
```
- url: A string representing the Pubky URL.
### list
```js
let response = await client.list(url, cursor, reverse, limit)
@@ -257,10 +245,10 @@ Run the local testnet server
npm run testnet
```
Use the logged addresses as inputs to `PubkyClient`
Use the logged addresses as inputs to `Client`
```js
import { PubkyClient } from '../index.js'
import { Client } from '../index.js'
const client = PubkyClient().testnet();
const client = Client.testnet();
```

View File

@@ -0,0 +1,4 @@
const makeFetchCookie = require("fetch-cookie").default;
let originalFetch = globalThis.fetch;
globalThis.fetch = makeFetchCookie(originalFetch);

View File

@@ -2,14 +2,14 @@
"name": "@synonymdev/pubky",
"type": "module",
"description": "Pubky client",
"version": "0.1.16",
"version": "0.4.0-rc.4",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/pubky/pubky"
"url": "git+https://github.com/pubky/pubky-core.git"
},
"scripts": {
"testnet": "cargo run -p pubky_homeserver -- --testnet",
"testnet": "cargo run -p pubky-homeserver -- --testnet",
"test": "npm run test-nodejs && npm run test-browser",
"test-nodejs": "tape test/*.js -cov",
"test-browser": "browserify test/*.js -p esmify | npx tape-run",
@@ -18,12 +18,12 @@
},
"files": [
"index.cjs",
"browser.js",
"index.js",
"pubky.d.ts",
"pubky_bg.wasm"
],
"main": "index.cjs",
"browser": "browser.js",
"browser": "index.js",
"types": "pubky.d.ts",
"keywords": [
"web",
@@ -37,5 +37,8 @@
"esmify": "^2.1.1",
"tape": "^5.8.1",
"tape-run": "^11.0.0"
},
"dependencies": {
"fetch-cookie": "^3.0.1"
}
}

View File

@@ -1,16 +1,17 @@
import test from 'tape'
import { PubkyClient, Keypair, PublicKey } from '../index.cjs'
import { Client, Keypair, PublicKey } from '../index.cjs'
const Homeserver = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo')
const HOMESERVER_PUBLICKEY = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo')
const TESTNET_HTTP_RELAY = "http://localhost:15412/link";
test('auth', async (t) => {
const client = PubkyClient.testnet();
test('Auth: basic', async (t) => {
const client = Client.testnet();
const keypair = Keypair.random()
const publicKey = keypair.publicKey()
await client.signup(keypair, Homeserver)
await client.signup(keypair, HOMESERVER_PUBLICKEY )
const session = await client.session(publicKey)
t.ok(session, "signup")
@@ -30,15 +31,42 @@ test('auth', async (t) => {
}
})
test("3rd party signin", async (t) => {
test("Auth: multi-user (cookies)", async (t) => {
const client = Client.testnet();
const alice = Keypair.random()
const bob = Keypair.random()
await client.signup(alice, HOMESERVER_PUBLICKEY )
let session = await client.session(alice.publicKey())
t.ok(session, "signup")
{
await client.signup(bob, HOMESERVER_PUBLICKEY )
const session = await client.session(bob.publicKey())
t.ok(session, "signup")
}
session = await client.session(alice.publicKey());
t.is(session.pubky().z32(), alice.publicKey().z32(), "alice is still signed in")
await client.signout(bob.publicKey());
session = await client.session(alice.publicKey());
t.is(session.pubky().z32(), alice.publicKey().z32(), "alice is still signed in after signout of bob")
})
test("Auth: 3rd party signin", async (t) => {
let keypair = Keypair.random();
let pubky = keypair.publicKey().z32();
// Third party app side
let capabilities = "/pub/pubky.app/:rw,/pub/foo.bar/file:r";
let client = PubkyClient.testnet();
let client = Client.testnet();
let [pubkyauth_url, pubkyauthResponse] = client
.authRequest("https://httprelay.staging.pubky.app/link/", capabilities);
.authRequest(TESTNET_HTTP_RELAY, capabilities);
if (globalThis.document) {
// Skip `sendAuthToken` in browser
@@ -49,9 +77,9 @@ test("3rd party signin", async (t) => {
// Authenticator side
{
let client = PubkyClient.testnet();
let client = Client.testnet();
await client.signup(keypair, Homeserver);
await client.signup(keypair, HOMESERVER_PUBLICKEY);
await client.sendAuthToken(keypair, pubkyauth_url)
}

41
pubky/pkg/test/http.js Normal file
View File

@@ -0,0 +1,41 @@
import test from 'tape'
import { Client, Keypair, PublicKey } from '../index.cjs'
const TLD = '8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo';
test("basic fetch", async (t) => {
let client = Client.testnet();
// Normal TLD
{
let response = await client.fetch(`https://relay.pkarr.org/`);
t.equal(response.status, 200);
}
// Pubky
let response = await client.fetch(`https://${TLD}/`);
t.equal(response.status, 200);
})
test("fetch failed", async (t) => {
let client = Client.testnet();
// Normal TLD
{
let response = await client.fetch(`https://nonexistent.domain/`).catch(e => e);
t.ok(response instanceof Error);
}
// Pubky
let response = await client.fetch(`https://1pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ew1/`).catch(e => e);
t.ok(response instanceof Error);
})

View File

@@ -1,125 +1,133 @@
import test from 'tape'
import { PubkyClient, Keypair, PublicKey } from '../index.cjs'
import { Client, Keypair, PublicKey, setLogLevel } from '../index.cjs'
const Homeserver = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo');
const HOMESERVER_PUBLICKEY = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo')
test('public: put/get', async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random();
await client.signup(keypair, Homeserver);
await client.signup(keypair, HOMESERVER_PUBLICKEY);
const publicKey = keypair.publicKey();
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
const json = { foo: 'bar' }
// PUT public data, by authorized client
await client.put(url, body);
await client.fetch(url, {
method:"PUT",
body: JSON.stringify(json),
contentType: "json",
credentials: "include"
});
const otherClient = PubkyClient.testnet();
const otherClient = Client.testnet();
// GET public data without signup or signin
{
let response = await otherClient.get(url);
let response = await otherClient.fetch(url)
t.ok(Buffer.from(response).equals(body))
t.is(response.status, 200);
t.deepEquals(await response.json(), {foo: "bar"})
}
// DELETE public data, by authorized client
await client.delete(url);
await client.fetch(url, {
method:"DELETE",
credentials: "include"
});
// GET public data without signup or signin
{
let response = await otherClient.get(url);
let response = await otherClient.fetch(url);
t.notOk(response)
t.is(response.status, 404)
}
})
test("not found", async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random();
await client.signup(keypair, Homeserver);
await client.signup(keypair, HOMESERVER_PUBLICKEY);
const publicKey = keypair.publicKey();
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
let result = await client.get(url).catch(e => e);
let result = await client.fetch(url);
t.notOk(result);
t.is(result.status, 404);
})
test("unauthorized", async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random()
const publicKey = keypair.publicKey()
await client.signup(keypair, Homeserver)
await client.signup(keypair, HOMESERVER_PUBLICKEY)
const session = await client.session(publicKey)
t.ok(session, "signup")
await client.signout(publicKey)
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
// PUT public data, by authorized client
let result = await client.put(url, body).catch(e => e);
let response = await client.fetch(url, {
method: "PUT",
body: JSON.stringify({ foo: 'bar' }),
contentType: "json",
credentials: "include"
});
t.ok(result instanceof Error);
t.is(
result.message,
`HTTP status client error (401 Unauthorized) for url (http://localhost:15411/${publicKey.z32()}/pub/example.com/arbitrary)`
)
t.equals(response.status,401);
})
test("forbidden", async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random()
const publicKey = keypair.publicKey()
await client.signup(keypair, Homeserver)
await client.signup(keypair, HOMESERVER_PUBLICKEY)
const session = await client.session(publicKey)
t.ok(session, "signup")
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
const body = (JSON.stringify({ foo: 'bar' }))
let url = `pubky://${publicKey.z32()}/priv/example.com/arbitrary`;
// PUT public data, by authorized client
let result = await client.put(url, body).catch(e => e);
let response = await client.fetch(url, {
method: "PUT",
body: JSON.stringify({ foo: 'bar' }),
credentials: "include"
});
t.ok(result instanceof Error);
t.is(
result.message,
`HTTP status client error (403 Forbidden) for url (http://localhost:15411/${publicKey.z32()}/priv/example.com/arbitrary)`
)
t.is(response.status, 403)
t.is(await response.text(), 'Writing to directories other than \'/pub/\' is forbidden')
})
test("list", async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random()
const publicKey = keypair.publicKey()
const pubky = publicKey.z32()
await client.signup(keypair, Homeserver)
await client.signup(keypair, HOMESERVER_PUBLICKEY)
let urls = [
`pubky://${pubky}/pub/a.wrong/a.txt`,
@@ -132,7 +140,11 @@ test("list", async (t) => {
]
for (let url of urls) {
await client.put(url, Buffer.from(""));
await client.fetch(url, {
method: "PUT",
body:Buffer.from(""),
credentials: "include"
});
}
let url = `pubky://${pubky}/pub/example.com/`;
@@ -242,13 +254,13 @@ test("list", async (t) => {
})
test('list shallow', async (t) => {
const client = PubkyClient.testnet();
const client = Client.testnet();
const keypair = Keypair.random()
const publicKey = keypair.publicKey()
const pubky = publicKey.z32()
await client.signup(keypair, Homeserver)
await client.signup(keypair, HOMESERVER_PUBLICKEY)
let urls = [
`pubky://${pubky}/pub/a.com/a.txt`,
@@ -264,7 +276,11 @@ test('list shallow', async (t) => {
]
for (let url of urls) {
await client.put(url, Buffer.from(""));
await client.fetch(url, {
method: "PUT",
body: Buffer.from(""),
credentials: "include"
});
}
let url = `pubky://${pubky}/pub/`;

View File

@@ -54,7 +54,7 @@ const bytes = __toBinary(${JSON.stringify(await readFile(path.join(__dirname, `.
`,
);
await writeFile(path.join(__dirname, `../../pkg/browser.js`), patched + "\nglobalThis['pubky'] = imports");
await writeFile(path.join(__dirname, `../../pkg/index.js`), patched + "\nglobalThis['pubky'] = imports");
// Move outside of nodejs
@@ -64,3 +64,12 @@ await Promise.all([".js", ".d.ts", "_bg.wasm"].map(suffix =>
path.join(__dirname, `../../pkg/${suffix === '.js' ? "index.cjs" : (name + suffix)}`),
))
)
// Add index.cjs headers
const indexcjsPath = path.join(__dirname, `../../pkg/index.cjs`);
const headerContent = await readFile(path.join(__dirname, `../../pkg/node-header.cjs`), 'utf8');
const indexcjsContent = await readFile(indexcjsPath, 'utf8');
await writeFile(indexcjsPath, headerContent + '\n' + indexcjsContent, 'utf8')

View File

@@ -1,56 +0,0 @@
//! Main Crate Error
use pkarr::dns::SimpleDnsError;
// Alias Result to be the crate Result.
pub type Result<T, E = Error> = core::result::Result<T, E>;
#[derive(thiserror::Error, Debug)]
/// Pubky crate's common Error enum
pub enum Error {
/// For starters; to be removed as the code matures.
#[error("Generic error: {0}")]
Generic(String),
#[error("Could not resolve endpoint for {0}")]
ResolveEndpoint(String),
#[error("Could not convert the passed type into a Url")]
InvalidUrl,
// === Transparent ===
#[error(transparent)]
Dns(#[from] SimpleDnsError),
#[error(transparent)]
Pkarr(#[from] pkarr::Error),
#[error(transparent)]
Url(#[from] url::ParseError),
#[error(transparent)]
Reqwest(#[from] reqwest::Error),
#[error(transparent)]
Session(#[from] pubky_common::session::Error),
#[error(transparent)]
Crypto(#[from] pubky_common::crypto::Error),
#[error(transparent)]
RecoveryFile(#[from] pubky_common::recovery_file::Error),
#[error(transparent)]
AuthToken(#[from] pubky_common::auth::Error),
}
#[cfg(target_arch = "wasm32")]
use wasm_bindgen::JsValue;
#[cfg(target_arch = "wasm32")]
impl From<Error> for JsValue {
fn from(error: Error) -> JsValue {
let error_message = error.to_string();
js_sys::Error::new(&error_message).into()
}
}

View File

@@ -1,7 +1,6 @@
#![doc = include_str!("../README.md")]
//!
mod error;
mod shared;
#[cfg(not(target_arch = "wasm32"))]
@@ -9,32 +8,32 @@ mod native;
#[cfg(target_arch = "wasm32")]
mod wasm;
#[cfg(target_arch = "wasm32")]
use std::{
collections::HashSet,
sync::{Arc, RwLock},
};
use std::fmt::Debug;
use wasm_bindgen::prelude::*;
#[cfg(not(target_arch = "wasm32"))]
use ::pkarr::PkarrClientAsync;
pub use error::Error;
#[cfg(not(target_arch = "wasm32"))]
pub use crate::shared::list_builder::ListBuilder;
/// A client for the Pubky homeserver API, as well as generic HTTP requests to Pubky URLs.
#[derive(Debug, Clone)]
#[derive(Clone)]
#[wasm_bindgen]
pub struct PubkyClient {
pub struct Client {
http: reqwest::Client,
pkarr: pkarr::Client,
#[cfg(not(target_arch = "wasm32"))]
pub(crate) pkarr: PkarrClientAsync,
/// A cookie jar for nodejs fetch.
cookie_store: std::sync::Arc<native::CookieJar>,
#[cfg(not(target_arch = "wasm32"))]
icann_http: reqwest::Client,
#[cfg(target_arch = "wasm32")]
pub(crate) session_cookies: Arc<RwLock<HashSet<String>>>,
#[cfg(target_arch = "wasm32")]
pub(crate) pkarr_relays: Vec<String>,
testnet: bool,
}
impl Debug for Client {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Pubky Client").finish()
}
}

View File

@@ -1,262 +1,97 @@
use std::net::ToSocketAddrs;
use std::time::Duration;
use std::{sync::Arc, time::Duration};
use bytes::Bytes;
use pubky_common::{
capabilities::Capabilities,
recovery_file::{create_recovery_file, decrypt_recovery_file},
session::Session,
};
use reqwest::{RequestBuilder, Response};
use tokio::sync::oneshot;
use url::Url;
use mainline::Testnet;
use pkarr::{mainline::MutableItem, Keypair, PkarrClientAsync};
use crate::Client;
use ::pkarr::{mainline::dht::Testnet, PkarrClient, PublicKey, SignedPacket};
mod api;
mod cookies;
mod http;
use crate::{
error::{Error, Result},
shared::list_builder::ListBuilder,
PubkyClient,
};
pub(crate) use cookies::CookieJar;
static DEFAULT_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),);
#[derive(Debug, Default)]
pub struct PubkyClientBuilder {
pkarr_settings: pkarr::Settings,
pub struct ClientBuilder {
pkarr: pkarr::ClientBuilder,
}
impl PubkyClientBuilder {
/// Set Pkarr client [pkarr::Settings].
pub fn pkarr_settings(mut self, settings: pkarr::Settings) -> Self {
self.pkarr_settings = settings;
self
}
/// Use the bootstrap nodes of a testnet, as the bootstrap nodes and
/// resolvers in the internal Pkarr client.
impl ClientBuilder {
/// Sets the following:
/// - Pkarr client's DHT bootstrap nodes = `testnet` bootstrap nodes.
/// - Pkarr client's resolvers = `testnet` bootstrap nodes.
/// - Pkarr client's DHT request timeout = 500 milliseconds (unless in CI, where it is left at the default of 2000).
pub fn testnet(mut self, testnet: &Testnet) -> Self {
self.pkarr_settings.dht.bootstrap = testnet.bootstrap.to_vec().into();
let bootstrap = testnet.bootstrap.clone();
self.pkarr_settings.resolvers = testnet
.bootstrap
.iter()
.flat_map(|resolver| resolver.to_socket_addrs())
.flatten()
.collect::<Vec<_>>()
.into();
self.pkarr.bootstrap(&bootstrap);
self
}
/// Set the request_timeout of the UDP socket in the Mainline DHT client in
/// the internal Pkarr client.
///
/// Useful to speed up unit tests.
/// Defaults to 2 seconds.
pub fn dht_request_timeout(mut self, timeout: Duration) -> Self {
self.pkarr_settings.dht.request_timeout = timeout.into();
self
}
/// Build [PubkyClient]
pub fn build(self) -> PubkyClient {
PubkyClient {
http: reqwest::Client::builder()
.cookie_store(true)
.user_agent(DEFAULT_USER_AGENT)
.build()
.unwrap(),
pkarr: PkarrClient::new(self.pkarr_settings).unwrap().as_async(),
if std::env::var("CI").is_err() {
self.pkarr.request_timeout(Duration::from_millis(500));
}
self
}
/// Build [Client]
pub fn build(self) -> Result<Client, BuildError> {
let pkarr = self.pkarr.build()?;
let cookie_store = Arc::new(CookieJar::default());
// TODO: allow a custom user agent, but always include the Pubky user agent information
let user_agent = DEFAULT_USER_AGENT;
let http = reqwest::ClientBuilder::from(pkarr.clone())
// TODO: use persistent cookie jar
.cookie_provider(cookie_store.clone())
.user_agent(user_agent)
.build()
.expect("config expected to not error");
let icann_http = reqwest::ClientBuilder::new()
.cookie_provider(cookie_store.clone())
.user_agent(user_agent)
.build()
.expect("config expected to not error");
Ok(Client {
cookie_store,
http,
icann_http,
pkarr,
})
}
}
impl Default for PubkyClient {
fn default() -> Self {
PubkyClient::builder().build()
}
}
// === Public API ===
impl PubkyClient {
/// Returns a builder to edit settings before creating [PubkyClient].
pub fn builder() -> PubkyClientBuilder {
PubkyClientBuilder::default()
impl Client {
/// Returns a builder to edit settings before creating [Client].
pub fn builder() -> ClientBuilder {
ClientBuilder::default()
}
/// Create a client connected to the local network
/// with the bootstrapping node: `localhost:6881`
pub fn testnet() -> Self {
Self::test(&Testnet {
bootstrap: vec!["localhost:6881".to_string()],
nodes: vec![],
})
pub fn testnet() -> Result<Self, BuildError> {
Self::builder()
.testnet(&Testnet {
bootstrap: vec!["localhost:6881".to_string()],
nodes: vec![],
})
.build()
}
/// Creates a [PubkyClient] with:
/// - DHT bootstrap nodes set to the `testnet` bootstrap nodes.
/// - DHT request timeout set to 500 milliseconds (unless in CI, where it is left at the default of 2000).
///
/// For more control, you can use [PubkyClient::builder] testnet option.
pub fn test(testnet: &Testnet) -> PubkyClient {
let mut builder = PubkyClient::builder().testnet(testnet);
if std::env::var("CI").is_err() {
builder = builder.dht_request_timeout(Duration::from_millis(500));
}
builder.build()
}
// === Getters ===
/// Returns a reference to the internal [pkarr] Client.
pub fn pkarr(&self) -> &PkarrClientAsync {
&self.pkarr
}
// === Auth ===
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
pub async fn signup(&self, keypair: &Keypair, homeserver: &PublicKey) -> Result<Session> {
self.inner_signup(keypair, homeserver).await
}
/// Check the current session for a given Pubky in its homeserver.
///
/// Returns [Session] or `None` (if received `404 NOT_FOUND`),
/// or [reqwest::Error] if the response has any other `>=400` status code.
pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>> {
self.inner_session(pubky).await
}
/// Signout from a homeserver.
pub async fn signout(&self, pubky: &PublicKey) -> Result<()> {
self.inner_signout(pubky).await
}
/// Signin to a homeserver.
pub async fn signin(&self, keypair: &Keypair) -> Result<Session> {
self.inner_signin(keypair).await
}
// === Public data ===
/// Upload a small payload to a given path.
pub async fn put<T: TryInto<Url>>(&self, url: T, content: &[u8]) -> Result<()> {
self.inner_put(url, content).await
}
/// Download a small payload from a given path relative to a pubky author.
pub async fn get<T: TryInto<Url>>(&self, url: T) -> Result<Option<Bytes>> {
self.inner_get(url).await
}
/// Delete a file at a path relative to a pubky author.
pub async fn delete<T: TryInto<Url>>(&self, url: T) -> Result<()> {
self.inner_delete(url).await
}
/// Returns a [ListBuilder] to help pass options before calling [ListBuilder::send].
///
/// `url` sets the path you want to list within.
pub fn list<T: TryInto<Url>>(&self, url: T) -> Result<ListBuilder> {
self.inner_list(url)
}
// === Helpers ===
/// Create a recovery file of the `keypair`, containing the secret key encrypted
/// using the `passphrase`.
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u8>> {
Ok(create_recovery_file(keypair, passphrase)?)
}
/// Recover a keypair from a recovery file by decrypting the secret key using `passphrase`.
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair> {
Ok(decrypt_recovery_file(recovery_file, passphrase)?)
}
/// Return `pubkyauth://` url and wait for the incoming [pubky_common::auth::AuthToken]
/// verifying that AuthToken, and if capabilities were requested, signing in to
/// the Pubky's homeserver and returning the [Session] information.
pub fn auth_request(
&self,
relay: impl TryInto<Url>,
capabilities: &Capabilities,
) -> Result<(Url, tokio::sync::oneshot::Receiver<PublicKey>)> {
let mut relay: Url = relay
.try_into()
.map_err(|_| Error::Generic("Invalid relay Url".into()))?;
let (pubkyauth_url, client_secret) = self.create_auth_request(&mut relay, capabilities)?;
let (tx, rx) = oneshot::channel::<PublicKey>();
let this = self.clone();
tokio::spawn(async move {
let to_send = this
.subscribe_to_auth_response(relay, &client_secret)
.await?;
tx.send(to_send)
.map_err(|_| Error::Generic("Failed to send the session after signing in with token, since the receiver is dropped".into()))?;
Ok::<(), Error>(())
});
Ok((pubkyauth_url, rx))
}
/// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
/// source of the pubkyauth request url.
pub async fn send_auth_token<T: TryInto<Url>>(
&self,
keypair: &Keypair,
pubkyauth_url: T,
) -> Result<()> {
let url: Url = pubkyauth_url.try_into().map_err(|_| Error::InvalidUrl)?;
self.inner_send_auth_token(keypair, url).await?;
Ok(())
#[cfg(test)]
/// Alias to `pubky::Client::builder().testnet(testnet).build().unwrap()`
pub(crate) fn test(testnet: &Testnet) -> Client {
Client::builder().testnet(testnet).build().unwrap()
}
}
// === Internals ===
impl PubkyClient {
// === Pkarr ===
pub(crate) async fn pkarr_resolve(
&self,
public_key: &PublicKey,
) -> Result<Option<SignedPacket>> {
Ok(self.pkarr.resolve(public_key).await?.or(self
.pkarr
.cache()
.get(&MutableItem::target_from_key(public_key.as_bytes(), &None))))
}
pub(crate) async fn pkarr_publish(&self, signed_packet: &SignedPacket) -> Result<()> {
Ok(self.pkarr.publish(signed_packet).await?)
}
// === HTTP ===
/// Make an HTTP(s) request to a URL with a Pkarr TLD
pub fn request(&self, method: reqwest::Method, url: Url) -> RequestBuilder {
self.http.request(method, url)
}
pub(crate) fn store_session(&self, _: &Response) {}
pub(crate) fn remove_session(&self, _: &PublicKey) {}
#[derive(Debug, thiserror::Error)]
pub enum BuildError {
#[error(transparent)]
/// Error building Pkarr client.
PkarrBuildError(#[from] pkarr::errors::BuildError),
}
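For orientation, a minimal construction sketch using the builder API above. This is an illustrative snippet rather than part of the changed files; it assumes the crate root exports `Client` as shown in `lib.rs`, and that `mainline` and `anyhow` are available as elsewhere in this diff.

```rust
use mainline::Testnet;
use pubky::Client;

fn main() -> anyhow::Result<()> {
    // Default client: builds the internal Pkarr client plus the two reqwest
    // clients (Pkarr-aware and ICANN) that share one cookie store.
    let client = Client::builder().build()?;

    // Local testing: point the DHT at a testnet, mirroring what
    // `Client::testnet()` does with the single bootstrap node `localhost:6881`.
    let testnet = Testnet::new(10).unwrap();
    let test_client = Client::builder().testnet(&testnet).build()?;

    drop((client, test_client));
    Ok(())
}
```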

View File

@@ -0,0 +1,74 @@
use pkarr::Keypair;
use pubky_common::session::Session;
use reqwest::IntoUrl;
use tokio::sync::oneshot;
use url::Url;
use pkarr::PublicKey;
use pubky_common::capabilities::Capabilities;
use anyhow::Result;
use crate::Client;
impl Client {
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
pub async fn signup(&self, keypair: &Keypair, homeserver: &PublicKey) -> Result<Session> {
self.inner_signup(keypair, homeserver).await
}
/// Check the current session for a given Pubky in its homeserver.
///
/// Returns [Session] or `None` (if received `404 NOT_FOUND`),
/// or [reqwest::Error] if the response has any other `>=400` status code.
pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>> {
self.inner_session(pubky).await
}
/// Signout from a homeserver.
pub async fn signout(&self, pubky: &PublicKey) -> Result<()> {
self.inner_signout(pubky).await
}
/// Signin to a homeserver.
pub async fn signin(&self, keypair: &Keypair) -> Result<Session> {
self.inner_signin(keypair).await
}
/// Return a `pubkyauth://` URL and wait for the incoming [AuthToken],
/// verifying that AuthToken and, if capabilities were requested, signing in to
/// the Pubky's homeserver and returning the [Session] information.
pub fn auth_request<T: IntoUrl>(
&self,
relay: T,
capabilities: &Capabilities,
) -> Result<(Url, tokio::sync::oneshot::Receiver<Result<PublicKey>>)> {
let mut relay: Url = relay.into_url()?;
let (pubkyauth_url, client_secret) = self.create_auth_request(&mut relay, capabilities)?;
let (tx, rx) = oneshot::channel::<Result<PublicKey>>();
let this = self.clone();
tokio::spawn(async move {
tx.send(this.subscribe_to_auth_response(relay, &client_secret).await)
});
Ok((pubkyauth_url, rx))
}
/// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
/// source of the pubkyauth request url.
pub async fn send_auth_token<T: IntoUrl>(
&self,
keypair: &Keypair,
pubkyauth_url: T,
) -> Result<()> {
self.inner_send_auth_token(keypair, pubkyauth_url).await
}
}
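To make the flow above concrete, a hypothetical end-to-end sketch (not part of the changed files): the third-party app requests capabilities through an HTTP relay, and the key holder signs and sends the token. The relay URL and homeserver key are placeholders, and error handling is simplified.

```rust
use anyhow::Result;
use pkarr::{Keypair, PublicKey};
use pubky::Client;
use pubky_common::capabilities::Capabilities;

async fn auth_flow(relay_url: &str, homeserver: &PublicKey) -> Result<()> {
    // Third-party app side: ask for read/write access to one directory.
    let app = Client::testnet()?;
    let capabilities: Capabilities = "/pub/pubky.app/:rw"
        .try_into()
        .expect("static capability string");
    let (pubkyauth_url, pubky_rx) = app.auth_request(relay_url, &capabilities)?;

    // Authenticator side: the wallet/key-manager holding the keypair.
    let authenticator = Client::testnet()?;
    let keypair = Keypair::random();
    authenticator.signup(&keypair, homeserver).await?;
    authenticator.send_auth_token(&keypair, pubkyauth_url).await?;

    // Back on the app side: the public key arrives over the oneshot channel,
    // and a session now exists on the homeserver for the granted capabilities.
    let pubky = pubky_rx.await??;
    let session = app.session(&pubky).await?;
    println!("signed in as {pubky}; session present: {}", session.is_some());
    Ok(())
}
```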

View File

@@ -0,0 +1,5 @@
pub mod recovery_file;
// TODO: put the Homeserver API behind a feature flag
pub mod auth;
pub mod public;

View File

@@ -0,0 +1,14 @@
use reqwest::IntoUrl;
use anyhow::Result;
use crate::{shared::list_builder::ListBuilder, Client};
impl Client {
/// Returns a [ListBuilder] to help pass options before calling [ListBuilder::send].
///
/// `url` sets the path you want to list within.
pub fn list<T: IntoUrl>(&self, url: T) -> Result<ListBuilder> {
self.inner_list(url)
}
}
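A short usage sketch for the list API (illustrative, not from the changed files); the options mirror the `ListBuilder` methods exercised by the tests later in this diff.

```rust
use anyhow::Result;
use pubky::Client;

async fn list_directory(client: &Client, pubky: &str) -> Result<()> {
    // List up to 10 entries under /pub/example.com/ in reverse order.
    let url = format!("pubky://{pubky}/pub/example.com/");
    let entries = client
        .list(&url)? // returns a ListBuilder
        .reverse(true)
        .limit(10)
        .send()
        .await?;

    for entry in entries {
        println!("{entry}");
    }
    Ok(())
}
```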

View File

@@ -0,0 +1,21 @@
use pubky_common::{
crypto::Keypair,
recovery_file::{create_recovery_file, decrypt_recovery_file},
};
use anyhow::Result;
use crate::Client;
impl Client {
/// Create a recovery file of the `keypair`, containing the secret key encrypted
/// using the `passphrase`.
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u8>> {
Ok(create_recovery_file(keypair, passphrase)?)
}
/// Recover a keypair from a recovery file by decrypting the secret key using `passphrase`.
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair> {
Ok(decrypt_recovery_file(recovery_file, passphrase)?)
}
}
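A small round-trip sketch of the recovery-file helpers above (illustrative; the passphrase is a placeholder):

```rust
use anyhow::Result;
use pubky::Client;
use pubky_common::crypto::Keypair;

fn recovery_file_roundtrip() -> Result<()> {
    let keypair = Keypair::random();

    // Encrypt the secret key under a passphrase...
    let recovery_file = Client::create_recovery_file(&keypair, "correct horse battery staple")?;

    // ...then decrypt it to recover the same keypair.
    let recovered = Client::decrypt_recovery_file(&recovery_file, "correct horse battery staple")?;
    assert_eq!(recovered.public_key(), keypair.public_key());

    Ok(())
}
```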

View File

@@ -0,0 +1,83 @@
use std::{collections::HashMap, sync::RwLock};
use pkarr::PublicKey;
use reqwest::{cookie::CookieStore, header::HeaderValue, Response};
#[derive(Default)]
pub struct CookieJar {
pubky_sessions: RwLock<HashMap<String, String>>,
normal_jar: RwLock<cookie_store::CookieStore>,
}
impl CookieJar {
pub(crate) fn store_session_after_signup(&self, response: &Response, pubky: &PublicKey) {
for (header_name, header_value) in response.headers() {
let cookie_name = &pubky.to_string();
if header_name == "set-cookie"
&& header_value.as_ref().starts_with(cookie_name.as_bytes())
{
if let Ok(Ok(cookie)) =
std::str::from_utf8(header_value.as_bytes()).map(cookie::Cookie::parse)
{
if cookie.name() == cookie_name {
let domain = format!("_pubky.{pubky}");
tracing::debug!(?cookie, "Storing cookie after signup");
self.pubky_sessions
.write()
.unwrap()
.insert(domain, cookie.value().to_string());
}
};
}
}
}
pub(crate) fn delete_session_after_signout(&self, pubky: &PublicKey) {
self.pubky_sessions
.write()
.unwrap()
.remove(&format!("_pubky.{pubky}"));
}
}
impl CookieStore for CookieJar {
fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &HeaderValue>, url: &url::Url) {
let iter = cookie_headers.filter_map(|val| {
val.to_str()
.ok()
.and_then(|s| cookie::Cookie::parse(s.to_owned()).ok())
});
self.normal_jar
.write()
.unwrap()
.store_response_cookies(iter, url);
}
fn cookies(&self, url: &url::Url) -> Option<HeaderValue> {
let s = self
.normal_jar
.read()
.unwrap()
.get_request_values(url)
.map(|(name, value)| format!("{name}={value}"))
.collect::<Vec<_>>()
.join("; ");
if s.is_empty() {
let host = url.host_str().unwrap_or("");
if let Ok(public_key) = PublicKey::try_from(host) {
let cookie_name = public_key.to_string();
return self.pubky_sessions.read().unwrap().get(host).map(|secret| {
HeaderValue::try_from(format!("{cookie_name}={secret}")).unwrap()
});
}
}
HeaderValue::from_maybe_shared(bytes::Bytes::from(s)).ok()
}
}

172
pubky/src/native/http.rs Normal file
View File

@@ -0,0 +1,172 @@
//! HTTP methods that support `https://` with Pkarr domains, and `pubky://` URLs
use pkarr::PublicKey;
use reqwest::{IntoUrl, Method, RequestBuilder};
use crate::Client;
impl Client {
/// Start building a `Request` with the `Method` and `Url`.
///
/// Returns a `RequestBuilder`, which will allow setting headers and
/// the request body before sending.
///
/// Differs from [reqwest::Client::request], in that it can make requests to:
/// 1. HTTPS URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn request<U: IntoUrl>(&self, method: Method, url: U) -> RequestBuilder {
let url = url.as_str();
if url.starts_with("pubky://") {
let url = format!("https://_pubky.{}", url.split_at(8).1);
return self.http.request(method, url);
} else if url.starts_with("https://") && PublicKey::try_from(url).is_err() {
return self.icann_http.request(method, url);
}
self.http.request(method, url)
}
/// Convenience method to make a `GET` request to a URL.
///
/// Differs from [reqwest::Client::get], in that it can make requests to:
/// 1. HTTP(s) URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::GET, url)
}
/// Convenience method to make a `POST` request to a URL.
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::POST, url)
}
/// Convenience method to make a `PUT` request to a URL.
///
/// Differs from [reqwest::Client::put], in that it can make requests to:
/// 1. HTTP(s) URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn put<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::PUT, url)
}
/// Convenience method to make a `PATCH` request to a URL.
///
/// Differs from [reqwest::Client::patch], in that it can make requests to:
/// 1. HTTP(s) URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn patch<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::PATCH, url)
}
/// Convenience method to make a `DELETE` request to a URL.
///
/// Differs from [reqwest::Client::delete], in that it can make requests to:
/// 1. HTTP(s) URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn delete<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::DELETE, url)
}
/// Convenience method to make a `HEAD` request to a URL.
///
/// Differs from [reqwest::Client::head], in that it can make requests to:
/// 1. HTTP(s) URLs with a [pkarr::PublicKey] as Top Level Domain, by resolving
/// corresponding endpoints, and verifying TLS certificates accordingly.
/// (example: `https://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`)
/// 2. Pubky URLs like `pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
/// by converting the url into `https://_pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy`
///
/// # Errors
///
/// This method fails whenever the supplied `Url` cannot be parsed.
pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
self.request(Method::HEAD, url)
}
// === Private Methods ===
pub(crate) async fn inner_request<T: IntoUrl>(&self, method: Method, url: T) -> RequestBuilder {
self.request(method, url)
}
}
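To illustrate the routing above, a hedged sketch (not part of the changed files): `pubky://` URLs are rewritten to `https://_pubky.<pubky>` and resolved through Pkarr, while ordinary ICANN domains fall through to the plain reqwest client.

```rust
use anyhow::Result;
use pubky::Client;
use reqwest::Method;

async fn request_examples(client: &Client, pubky: &str) -> Result<()> {
    // `pubky://<pubky>/...` becomes `https://_pubky.<pubky>/...` and is
    // resolved via Pkarr, with TLS verified against the public key.
    let response = client
        .get(format!("pubky://{pubky}/pub/example.com/hello.txt"))
        .send()
        .await?;
    println!("pubky URL -> {}", response.status());

    // A plain ICANN domain is served by the regular reqwest client.
    let response = client
        .request(Method::GET, "https://example.com/")
        .send()
        .await?;
    println!("ICANN URL -> {}", response.status());

    Ok(())
}
```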
#[cfg(test)]
mod tests {
use mainline::Testnet;
use pubky_homeserver::Homeserver;
use crate::Client;
#[tokio::test]
async fn http_get_pubky() {
let testnet = Testnet::new(10).unwrap();
let homeserver = Homeserver::start_test(&testnet).await.unwrap();
let client = Client::test(&testnet);
let response = client
.get(format!("https://{}/", homeserver.public_key()))
.send()
.await
.unwrap();
assert_eq!(response.status(), 200)
}
#[tokio::test]
async fn http_get_icann() {
let testnet = Testnet::new(10).unwrap();
let client = Client::test(&testnet);
let response = client
.request(Default::default(), "https://example.com/")
.send()
.await
.unwrap();
assert_eq!(response.status(), 200);
}
}

View File

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use base64::{alphabet::URL_SAFE, engine::general_purpose::NO_PAD, Engine};
use reqwest::{Method, StatusCode};
use reqwest::{IntoUrl, Method, StatusCode};
use url::Url;
use pkarr::{Keypair, PublicKey};
@@ -12,14 +12,11 @@ use pubky_common::{
session::Session,
};
use crate::{
error::{Error, Result},
PubkyClient,
};
use anyhow::Result;
use super::pkarr::Endpoint;
use crate::{handle_http_error, Client};
impl PubkyClient {
impl Client {
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
@@ -29,23 +26,22 @@ impl PubkyClient {
keypair: &Keypair,
homeserver: &PublicKey,
) -> Result<Session> {
let homeserver = homeserver.to_string();
let Endpoint { mut url, .. } = self.resolve_endpoint(&homeserver).await?;
url.set_path("/signup");
let body = AuthToken::sign(keypair, vec![Capability::root()]).serialize();
let response = self
.request(Method::POST, url.clone())
.body(body)
.inner_request(Method::POST, format!("https://{}/signup", homeserver))
.await
.body(AuthToken::sign(keypair, vec![Capability::root()]).serialize())
.send()
.await?;
self.store_session(&response);
handle_http_error!(response);
self.publish_pubky_homeserver(keypair, &homeserver).await?;
self.publish_homeserver(keypair, &homeserver.to_string())
.await?;
// Store the cookie to the correct URL.
#[cfg(not(target_arch = "wasm32"))]
self.cookie_store
.store_session_after_signup(&response, &keypair.public_key());
let bytes = response.bytes().await?;
@@ -57,34 +53,35 @@ impl PubkyClient {
/// Returns None if not signed in, or [reqwest::Error]
/// if the response has any other `>=400` status code.
pub(crate) async fn inner_session(&self, pubky: &PublicKey) -> Result<Option<Session>> {
let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(pubky).await?;
let response = self
.inner_request(Method::GET, format!("pubky://{}/session", pubky))
.await
.send()
.await?;
url.set_path(&format!("/{}/session", pubky));
let res = self.request(Method::GET, url).send().await?;
if res.status() == StatusCode::NOT_FOUND {
if response.status() == StatusCode::NOT_FOUND {
return Ok(None);
}
if !res.status().is_success() {
res.error_for_status_ref()?;
};
handle_http_error!(response);
let bytes = res.bytes().await?;
let bytes = response.bytes().await?;
Ok(Some(Session::deserialize(&bytes)?))
}
/// Signout from a homeserver.
pub(crate) async fn inner_signout(&self, pubky: &PublicKey) -> Result<()> {
let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(pubky).await?;
let response = self
.inner_request(Method::DELETE, format!("pubky://{}/session", pubky))
.await
.send()
.await?;
url.set_path(&format!("/{}/session", pubky));
handle_http_error!(response);
self.request(Method::DELETE, url).send().await?;
self.remove_session(pubky);
#[cfg(not(target_arch = "wasm32"))]
self.cookie_store.delete_session_after_signout(pubky);
Ok(())
}
@@ -96,11 +93,18 @@ impl PubkyClient {
self.signin_with_authtoken(&token).await
}
pub(crate) async fn inner_send_auth_token(
pub(crate) async fn inner_send_auth_token<T: IntoUrl>(
&self,
keypair: &Keypair,
pubkyauth_url: Url,
pubkyauth_url: T,
) -> Result<()> {
let pubkyauth_url = Url::parse(
pubkyauth_url
.as_str()
.replace("pubkyauth_url", "http")
.as_str(),
)?;
let query_params: HashMap<String, String> =
pubkyauth_url.query_pairs().into_owned().collect();
@@ -136,36 +140,34 @@ impl PubkyClient {
let engine = base64::engine::GeneralPurpose::new(&URL_SAFE, NO_PAD);
let mut callback = relay.clone();
let mut path_segments = callback.path_segments_mut().unwrap();
let mut callback_url = relay.clone();
let mut path_segments = callback_url.path_segments_mut().unwrap();
path_segments.pop_if_empty();
let channel_id = engine.encode(hash(&client_secret).as_bytes());
path_segments.push(&channel_id);
drop(path_segments);
let response = self
.request(Method::POST, callback)
.inner_request(Method::POST, callback_url)
.await
.body(encrypted_token)
.send()
.await?;
response.error_for_status()?;
handle_http_error!(response);
Ok(())
}
pub(crate) async fn signin_with_authtoken(&self, token: &AuthToken) -> Result<Session> {
let mut url = Url::parse(&format!("https://{}/session", token.pubky()))?;
self.resolve_url(&mut url).await?;
let response = self
.request(Method::POST, url)
.inner_request(Method::POST, format!("pubky://{}/session", token.pubky()))
.await
.body(token.serialize())
.send()
.await?;
self.store_session(&response);
handle_http_error!(response);
let bytes = response.bytes().await?;
@@ -188,7 +190,8 @@ impl PubkyClient {
let mut segments = relay
.path_segments_mut()
.map_err(|_| Error::Generic("Invalid relay".into()))?;
.map_err(|_| anyhow::anyhow!("Invalid relay"))?;
// remove trailing slash if any.
segments.pop_if_empty();
let channel_id = &engine.encode(hash(&client_secret).as_bytes());
@@ -203,9 +206,11 @@ impl PubkyClient {
relay: Url,
client_secret: &[u8; 32],
) -> Result<PublicKey> {
let response = self.http.request(Method::GET, relay).send().await?;
// TODO: use a clearnet client.
let response = reqwest::get(relay).await?;
let encrypted_token = response.bytes().await?;
let token_bytes = decrypt(&encrypted_token, client_secret)?;
let token_bytes = decrypt(&encrypted_token, client_secret)
.map_err(|e| anyhow::anyhow!("Got invalid token: {e}"))?;
let token = AuthToken::verify(&token_bytes)?;
if !token.capabilities().is_empty() {
@@ -218,20 +223,21 @@ impl PubkyClient {
#[cfg(test)]
mod tests {
use crate::*;
use pkarr::{mainline::Testnet, Keypair};
use http_relay::HttpRelay;
use mainline::Testnet;
use pkarr::Keypair;
use pubky_common::capabilities::{Capabilities, Capability};
use pubky_homeserver::Homeserver;
use reqwest::StatusCode;
#[tokio::test]
async fn basic_authn() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -269,23 +275,27 @@ mod tests {
#[tokio::test]
async fn authz() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let http_relay = HttpRelay::builder().build().await.unwrap();
let http_relay_url = http_relay.local_link_url();
let keypair = Keypair::random();
let pubky = keypair.public_key();
// Third party app side
let capabilities: Capabilities =
"/pub/pubky.app/:rw,/pub/foo.bar/file:r".try_into().unwrap();
let client = PubkyClient::test(&testnet);
let (pubkyauth_url, pubkyauth_response) = client
.auth_request("https://httprelay.staging.pubky.app/link/", &capabilities)
.unwrap();
let client = Client::test(&testnet);
let (pubkyauth_url, pubkyauth_response) =
client.auth_request(http_relay_url, &capabilities).unwrap();
// Authenticator side
{
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
client.signup(&keypair, &server.public_key()).await.unwrap();
@@ -295,37 +305,86 @@ mod tests {
.unwrap();
}
let public_key = pubkyauth_response.await.unwrap();
let public_key = pubkyauth_response
.await
.expect("sender to not be dropped")
.unwrap();
assert_eq!(&public_key, &pubky);
let session = client.session(&pubky).await.unwrap().unwrap();
assert_eq!(session.capabilities(), &capabilities.0);
// Test access control enforcement
client
.put(format!("pubky://{pubky}/pub/pubky.app/foo").as_str(), &[])
.put(format!("pubky://{pubky}/pub/pubky.app/foo"))
.body(vec![])
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(
client
.put(format!("pubky://{pubky}/pub/pubky.app").as_str(), &[])
.put(format!("pubky://{pubky}/pub/pubky.app"))
.body(vec![])
.send()
.await
.map_err(|e| match e {
crate::Error::Reqwest(e) => e.status(),
_ => None,
}),
Err(Some(StatusCode::FORBIDDEN))
.unwrap()
.status(),
StatusCode::FORBIDDEN
);
assert_eq!(
client
.put(format!("pubky://{pubky}/pub/foo.bar/file").as_str(), &[])
.put(format!("pubky://{pubky}/pub/foo.bar/file"))
.body(vec![])
.send()
.await
.map_err(|e| match e {
crate::Error::Reqwest(e) => e.status(),
_ => None,
}),
Err(Some(StatusCode::FORBIDDEN))
.unwrap()
.status(),
StatusCode::FORBIDDEN
);
}
#[tokio::test]
async fn multiple_users() {
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = Client::test(&testnet);
let first_keypair = Keypair::random();
let second_keypair = Keypair::random();
client
.signup(&first_keypair, &server.public_key())
.await
.unwrap();
client
.signup(&second_keypair, &server.public_key())
.await
.unwrap();
let session = client
.session(&first_keypair.public_key())
.await
.unwrap()
.unwrap();
assert_eq!(session.pubky(), &first_keypair.public_key());
assert!(session.capabilities().contains(&Capability::root()));
let session = client
.session(&second_keypair.public_key())
.await
.unwrap()
.unwrap();
assert_eq!(session.pubky(), &second_keypair.public_key());
assert!(session.capabilities().contains(&Capability::root()));
}
}

View File

@@ -1,25 +1,26 @@
use reqwest::Method;
use url::Url;
use reqwest::{IntoUrl, Method};
use crate::{error::Result, PubkyClient};
use anyhow::Result;
use crate::{handle_http_error, Client};
/// Helper struct to edit Pubky homeserver's list API options before sending them.
#[derive(Debug)]
pub struct ListBuilder<'a> {
url: Url,
url: String,
reverse: bool,
limit: Option<u16>,
cursor: Option<&'a str>,
client: &'a PubkyClient,
client: &'a Client,
shallow: bool,
}
impl<'a> ListBuilder<'a> {
/// Create a new List request builder
pub(crate) fn new(client: &'a PubkyClient, url: Url) -> Self {
pub(crate) fn new<T: IntoUrl>(client: &'a Client, url: T) -> Self {
Self {
client,
url,
url: url.as_str().to_string(),
limit: None,
cursor: None,
reverse: false,
@@ -59,7 +60,7 @@ impl<'a> ListBuilder<'a> {
/// respecting [ListBuilder::reverse], [ListBuilder::limit] and [ListBuilder::cursor]
/// options.
pub async fn send(self) -> Result<Vec<String>> {
let mut url = self.client.pubky_to_http(self.url).await?;
let mut url = url::Url::parse(&self.url)?;
if !url.path().ends_with('/') {
let path = url.path().to_string();
@@ -91,9 +92,14 @@ impl<'a> ListBuilder<'a> {
drop(query);
let response = self.client.request(Method::GET, url).send().await?;
let response = self
.client
.inner_request(Method::GET, url)
.await
.send()
.await?;
response.error_for_status_ref()?;
handle_http_error!(response);
// TODO: bail on too large files.
let bytes = response.bytes().await?;

View File

@@ -2,3 +2,15 @@ pub mod auth;
pub mod list_builder;
pub mod pkarr;
pub mod public;
#[macro_export]
macro_rules! handle_http_error {
($res:expr) => {
if let Err(status) = $res.error_for_status_ref() {
return match $res.text().await {
Ok(text) => Err(anyhow::anyhow!("{status}. Error message: {text}")),
_ => Err(anyhow::anyhow!("{status}")),
};
}
};
}
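For context, a hypothetical helper showing how `handle_http_error!` is used at the call sites elsewhere in this diff: it early-returns an `anyhow` error, including the response body when readable, for any non-success status. The `fetch_text` function is not part of the crate; it assumes `anyhow` as a dependency of the calling crate.

```rust
use anyhow::Result;
use pubky::{handle_http_error, Client};
use reqwest::Method;

// Hypothetical helper mirroring the macro's call sites in this diff.
async fn fetch_text(client: &Client, url: &str) -> Result<String> {
    let response = client.request(Method::GET, url).send().await?;

    // Bails out with `anyhow!("{status}. Error message: {text}")` on >= 400.
    handle_http_error!(response);

    Ok(response.text().await?)
}
```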

View File

@@ -1,336 +1,81 @@
use url::Url;
use pkarr::{dns::rdata::SVCB, Keypair, SignedPacket};
use pkarr::{
dns::{rdata::SVCB, Packet},
Keypair, PublicKey, SignedPacket,
};
use anyhow::Result;
use crate::{
error::{Error, Result},
PubkyClient,
};
use crate::Client;
const MAX_ENDPOINT_RESOLUTION_RECURSION: u8 = 3;
impl Client {
/// Publish the HTTPS record for `_pubky.<public_key>`.
pub(crate) async fn publish_homeserver(&self, keypair: &Keypair, host: &str) -> Result<()> {
// TODO: Before making public, consider the effect on other records and other mirrors
impl PubkyClient {
/// Publish the SVCB record for `_pubky.<public_key>`.
pub(crate) async fn publish_pubky_homeserver(
&self,
keypair: &Keypair,
host: &str,
) -> Result<()> {
let existing = self.pkarr_resolve(&keypair.public_key()).await?;
let existing = self.pkarr.resolve_most_recent(&keypair.public_key()).await;
let mut packet = Packet::new_reply(0);
let mut signed_packet_builder = SignedPacket::builder();
if let Some(existing) = existing {
for answer in existing.packet().answers.iter().cloned() {
if let Some(ref existing) = existing {
for answer in existing.resource_records("_pubky") {
if !answer.name.to_string().starts_with("_pubky") {
packet.answers.push(answer.into_owned())
signed_packet_builder = signed_packet_builder.record(answer.to_owned());
}
}
}
let svcb = SVCB::new(0, host.try_into()?);
packet.answers.push(pkarr::dns::ResourceRecord::new(
"_pubky".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::HTTPS(svcb.into()),
));
let signed_packet = SignedPacket::builder()
.https("_pubky".try_into().unwrap(), svcb, 60 * 60)
.sign(keypair)?;
let signed_packet = SignedPacket::from_packet(keypair, &packet)?;
self.pkarr_publish(&signed_packet).await?;
self.pkarr
.publish(&signed_packet, existing.map(|s| s.timestamp()))
.await?;
Ok(())
}
/// Resolve the homeserver for a pubky.
pub(crate) async fn resolve_pubky_homeserver(&self, pubky: &PublicKey) -> Result<Endpoint> {
let target = format!("_pubky.{pubky}");
self.resolve_endpoint(&target)
.await
.map_err(|_| Error::Generic("Could not resolve homeserver".to_string()))
}
/// Resolve a service's public_key and "non-pkarr url" from a Pubky domain
///
/// "non-pkarr" url is any URL where the hostname isn't a 52 z-base32 character,
/// usually an IPv4, IPv6 or ICANN domain, but could also be any other unknown hostname.
///
/// Recursively resolve SVCB and HTTPS endpoints, with [MAX_ENDPOINT_RESOLUTION_RECURSION] limit.
pub(crate) async fn resolve_endpoint(&self, target: &str) -> Result<Endpoint> {
let original_target = target;
// TODO: cache the result of this function?
let mut target = target.to_string();
let mut endpoint_public_key = None;
let mut origin = target.clone();
let mut step = 0;
// PublicKey is very good at extracting the Pkarr TLD from a string.
while let Ok(public_key) = PublicKey::try_from(target.clone()) {
if step >= MAX_ENDPOINT_RESOLUTION_RECURSION {
break;
};
step += 1;
if let Some(signed_packet) = self
.pkarr_resolve(&public_key)
.await
.map_err(|_| Error::ResolveEndpoint(original_target.into()))?
{
// Choose most prior SVCB record
let svcb = signed_packet.resource_records(&target).fold(
None,
|prev: Option<SVCB>, answer| {
if let Some(svcb) = match &answer.rdata {
pkarr::dns::rdata::RData::SVCB(svcb) => Some(svcb),
pkarr::dns::rdata::RData::HTTPS(curr) => Some(&curr.0),
_ => None,
} {
let curr = svcb.clone();
if curr.priority == 0 {
return Some(curr);
}
if let Some(prev) = &prev {
// TODO return random if priority is the same
if curr.priority >= prev.priority {
return Some(curr);
}
} else {
return Some(curr);
}
}
prev
},
);
if let Some(svcb) = svcb {
endpoint_public_key = Some(public_key.clone());
target = svcb.target.to_string();
if let Some(port) = svcb.get_param(pkarr::dns::rdata::SVCB::PORT) {
if port.len() < 2 {
// TODO: debug! Error encoding port!
}
let port = u16::from_be_bytes([port[0], port[1]]);
origin = format!("{target}:{port}");
} else {
origin.clone_from(&target);
};
if step >= MAX_ENDPOINT_RESOLUTION_RECURSION {
continue;
};
}
} else {
break;
}
}
if PublicKey::try_from(origin.as_str()).is_ok() {
return Err(Error::ResolveEndpoint(original_target.into()));
}
if endpoint_public_key.is_some() {
let url = Url::parse(&format!(
"{}://{}",
if origin.starts_with("localhost") {
"http"
} else {
"https"
},
origin
))?;
return Ok(Endpoint { url });
}
Err(Error::ResolveEndpoint(original_target.into()))
}
pub(crate) async fn resolve_url(&self, url: &mut Url) -> Result<()> {
if let Some(Ok(pubky)) = url.host_str().map(PublicKey::try_from) {
let Endpoint { url: x, .. } = self.resolve_endpoint(&format!("_pubky.{pubky}")).await?;
url.set_host(x.host_str())?;
url.set_port(x.port()).expect("should work!");
url.set_scheme(x.scheme()).expect("should work!");
};
Ok(())
}
}
#[derive(Debug)]
pub(crate) struct Endpoint {
pub url: Url,
}
#[cfg(test)]
mod tests {
use super::*;
use pkarr::{
dns::{
rdata::{HTTPS, SVCB},
Packet,
},
mainline::{dht::DhtSettings, Testnet},
Keypair, PkarrClient, Settings, SignedPacket,
};
use pubky_homeserver::Homeserver;
#[tokio::test]
async fn resolve_endpoint_https() {
let testnet = Testnet::new(10);
let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
bootstrap: Some(testnet.bootstrap.clone()),
..Default::default()
},
..Default::default()
})
.unwrap()
.as_async();
let domain = "example.com";
let mut target;
// Server
{
let keypair = Keypair::random();
let https = HTTPS(SVCB::new(0, domain.try_into().unwrap()));
let mut packet = Packet::new_reply(0);
packet.answers.push(pkarr::dns::ResourceRecord::new(
"foo".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::HTTPS(https),
));
let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();
pkarr_client.publish(&signed_packet).await.unwrap();
target = format!("foo.{}", keypair.public_key());
}
// intermediate
{
let keypair = Keypair::random();
let svcb = SVCB::new(0, target.as_str().try_into().unwrap());
let mut packet = Packet::new_reply(0);
packet.answers.push(pkarr::dns::ResourceRecord::new(
"bar".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();
pkarr_client.publish(&signed_packet).await.unwrap();
target = format!("bar.{}", keypair.public_key())
}
{
let keypair = Keypair::random();
let svcb = SVCB::new(0, target.as_str().try_into().unwrap());
let mut packet = Packet::new_reply(0);
packet.answers.push(pkarr::dns::ResourceRecord::new(
"pubky".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();
pkarr_client.publish(&signed_packet).await.unwrap();
target = format!("pubky.{}", keypair.public_key())
}
let client = PubkyClient::test(&testnet);
let endpoint = client.resolve_endpoint(&target).await.unwrap();
assert_eq!(endpoint.url.host_str().unwrap(), domain);
}
#[tokio::test]
async fn resolve_homeserver() {
let testnet = Testnet::new(10);
let server = Homeserver::start_test(&testnet).await.unwrap();
// Publish an intermediate controller of the homeserver
let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
bootstrap: Some(testnet.bootstrap.clone()),
..Default::default()
},
..Default::default()
})
.unwrap()
.as_async();
let intermediate = Keypair::random();
let mut packet = Packet::new_reply(0);
let server_tld = server.public_key().to_string();
let svcb = SVCB::new(0, server_tld.as_str().try_into().unwrap());
packet.answers.push(pkarr::dns::ResourceRecord::new(
"pubky".try_into().unwrap(),
pkarr::dns::CLASS::IN,
60 * 60,
pkarr::dns::rdata::RData::SVCB(svcb),
));
let signed_packet = SignedPacket::from_packet(&intermediate, &packet).unwrap();
pkarr_client.publish(&signed_packet).await.unwrap();
{
let client = PubkyClient::test(&testnet);
let pubky = Keypair::random();
client
.publish_pubky_homeserver(&pubky, &format!("pubky.{}", &intermediate.public_key()))
.await
.unwrap();
let Endpoint { url, .. } = client
.resolve_pubky_homeserver(&pubky.public_key())
.await
.unwrap();
assert_eq!(url.host_str(), Some("localhost"));
assert_eq!(url.port(), Some(server.port()));
}
}
// pub(crate) resolve_icann_domain() {
//
// let original_url = url.as_str();
// let mut url = Url::parse(original_url).expect("Invalid url in inner_request");
//
// if url.scheme() == "pubky" {
// // TODO: use https for anything other than testnet
// url.set_scheme("http")
// .expect("couldn't replace pubky:// with http://");
// url.set_host(Some(&format!("_pubky.{}", url.host_str().unwrap_or(""))))
// .expect("couldn't map pubk://<pubky> to https://_pubky.<pubky>");
// }
//
// let qname = url.host_str().unwrap_or("").to_string();
//
// if PublicKey::try_from(original_url).is_ok() {
// let mut stream = self.pkarr.resolve_https_endpoints(&qname);
//
// let mut so_far: Option<Endpoint> = None;
//
// while let Some(endpoint) = stream.next().await {
// if let Some(ref e) = so_far {
// if e.domain() == "." && endpoint.domain() != "." {
// so_far = Some(endpoint);
// }
// } else {
// so_far = Some(endpoint)
// }
// }
//
// if let Some(e) = so_far {
// url.set_host(Some(e.domain()))
// .expect("coultdn't use the resolved endpoint's domain");
// url.set_port(Some(e.port()))
// .expect("coultdn't use the resolved endpoint's port");
//
// return self.http.request(method, url).fetch_credentials_include();
// } else {
// // TODO: didn't find any domain, what to do?
// }
// }
//
// self.http.request(method, url).fetch_credentials_include()
// }
}

View File

@@ -1,114 +1,33 @@
use bytes::Bytes;
use reqwest::IntoUrl;
use pkarr::PublicKey;
use reqwest::{Method, StatusCode};
use url::Url;
use anyhow::Result;
use crate::{
error::{Error, Result},
PubkyClient,
};
use crate::Client;
use super::{list_builder::ListBuilder, pkarr::Endpoint};
use super::list_builder::ListBuilder;
impl PubkyClient {
pub(crate) async fn inner_put<T: TryInto<Url>>(&self, url: T, content: &[u8]) -> Result<()> {
let url = self.pubky_to_http(url).await?;
let response = self
.request(Method::PUT, url)
.body(content.to_owned())
.send()
.await?;
response.error_for_status()?;
Ok(())
}
pub(crate) async fn inner_get<T: TryInto<Url>>(&self, url: T) -> Result<Option<Bytes>> {
let url = self.pubky_to_http(url).await?;
let response = self.request(Method::GET, url).send().await?;
if response.status() == StatusCode::NOT_FOUND {
return Ok(None);
}
response.error_for_status_ref()?;
// TODO: bail on too large files.
let bytes = response.bytes().await?;
Ok(Some(bytes))
}
pub(crate) async fn inner_delete<T: TryInto<Url>>(&self, url: T) -> Result<()> {
let url = self.pubky_to_http(url).await?;
let response = self.request(Method::DELETE, url).send().await?;
response.error_for_status_ref()?;
Ok(())
}
pub(crate) fn inner_list<T: TryInto<Url>>(&self, url: T) -> Result<ListBuilder> {
Ok(ListBuilder::new(
self,
url.try_into().map_err(|_| Error::InvalidUrl)?,
))
}
pub(crate) async fn pubky_to_http<T: TryInto<Url>>(&self, url: T) -> Result<Url> {
let original_url: Url = url.try_into().map_err(|_| Error::InvalidUrl)?;
let pubky = original_url
.host_str()
.ok_or(Error::Generic("Missing Pubky Url host".to_string()))?;
if let Ok(public_key) = PublicKey::try_from(pubky) {
let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(&public_key).await?;
// TODO: remove if we move to subdomains instead of paths.
if original_url.scheme() == "pubky" {
let path = original_url.path_segments();
let mut split = url.path_segments_mut().unwrap();
split.push(pubky);
if let Some(segments) = path {
for segment in segments {
split.push(segment);
}
}
drop(split);
}
return Ok(url);
}
Ok(original_url)
impl Client {
pub(crate) fn inner_list<T: IntoUrl>(&self, url: T) -> Result<ListBuilder> {
Ok(ListBuilder::new(self, url))
}
}
#[cfg(test)]
mod tests {
use core::panic;
use crate::*;
use bytes::Bytes;
use pkarr::{mainline::Testnet, Keypair};
use mainline::Testnet;
use pkarr::Keypair;
use pubky_homeserver::Homeserver;
use reqwest::{Method, StatusCode};
#[tokio::test]
async fn put_get_delete() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -117,25 +36,38 @@ mod tests {
let url = format!("pubky://{}/pub/foo.txt", keypair.public_key());
let url = url.as_str();
client.put(url, &[0, 1, 2, 3, 4]).await.unwrap();
client
.put(url)
.body(vec![0, 1, 2, 3, 4])
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let response = client.get(url).await.unwrap().unwrap();
let response = client.get(url).send().await.unwrap().bytes().await.unwrap();
assert_eq!(response, bytes::Bytes::from(vec![0, 1, 2, 3, 4]));
client.delete(url).await.unwrap();
client
.delete(url)
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let response = client.get(url).await.unwrap();
let response = client.get(url).send().await.unwrap();
assert_eq!(response, None);
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[tokio::test]
async fn unauthorized_put_delete() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -146,7 +78,7 @@ mod tests {
let url = format!("pubky://{public_key}/pub/foo.txt");
let url = url.as_str();
let other_client = PubkyClient::test(&testnet);
let other_client = Client::test(&testnet);
{
let other = Keypair::random();
@@ -156,19 +88,24 @@ mod tests {
.await
.unwrap();
let response = other_client.put(url, &[0, 1, 2, 3, 4]).await;
match response {
Err(Error::Reqwest(error)) => {
assert!(error.status() == Some(StatusCode::UNAUTHORIZED))
}
_ => {
panic!("expected error StatusCode::UNAUTHORIZED")
}
}
assert_eq!(
other_client
.put(url)
.body(vec![0, 1, 2, 3, 4])
.send()
.await
.unwrap()
.status(),
StatusCode::UNAUTHORIZED
);
}
client.put(url, &[0, 1, 2, 3, 4]).await.unwrap();
client
.put(url)
.body(vec![0, 1, 2, 3, 4])
.send()
.await
.unwrap();
{
let other = Keypair::random();
@@ -179,29 +116,23 @@ mod tests {
.await
.unwrap();
let response = other_client.delete(url).await;
match response {
Err(Error::Reqwest(error)) => {
assert!(error.status() == Some(StatusCode::UNAUTHORIZED))
}
_ => {
panic!("expected error StatusCode::UNAUTHORIZED")
}
}
assert_eq!(
other_client.delete(url).send().await.unwrap().status(),
StatusCode::UNAUTHORIZED
);
}
let response = client.get(url).await.unwrap().unwrap();
let response = client.get(url).send().await.unwrap().bytes().await.unwrap();
assert_eq!(response, bytes::Bytes::from(vec![0, 1, 2, 3, 4]));
}
#[tokio::test]
async fn list() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -221,14 +152,13 @@ mod tests {
];
for url in urls {
client.put(url.as_str(), &[0]).await.unwrap();
client.put(url).body(vec![0]).send().await.unwrap();
}
let url = format!("pubky://{pubky}/pub/example.com/extra");
let url = url.as_str();
{
let list = client.list(url).unwrap().send().await.unwrap();
let list = client.list(&url).unwrap().send().await.unwrap();
assert_eq!(
list,
@@ -244,7 +174,7 @@ mod tests {
}
{
let list = client.list(url).unwrap().limit(2).send().await.unwrap();
let list = client.list(&url).unwrap().limit(2).send().await.unwrap();
assert_eq!(
list,
@@ -258,7 +188,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.limit(2)
.cursor("a.txt")
@@ -278,7 +208,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.limit(2)
.cursor("cc-nested/")
@@ -298,7 +228,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.limit(2)
.cursor(&format!("pubky://{pubky}/pub/example.com/a.txt"))
@@ -318,7 +248,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.limit(2)
.cursor("/a.txt")
@@ -338,7 +268,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.reverse(true)
.send()
@@ -360,7 +290,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.reverse(true)
.limit(2)
@@ -380,7 +310,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.reverse(true)
.limit(2)
@@ -402,10 +332,10 @@ mod tests {
#[tokio::test]
async fn list_shallow() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -427,15 +357,14 @@ mod tests {
];
for url in urls {
client.put(url.as_str(), &[0]).await.unwrap();
client.put(url).body(vec![0]).send().await.unwrap();
}
let url = format!("pubky://{pubky}/pub/");
let url = url.as_str();
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.send()
@@ -459,7 +388,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.limit(2)
@@ -479,7 +408,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.limit(2)
@@ -500,7 +429,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.limit(3)
@@ -522,7 +451,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.reverse(true)
.shallow(true)
@@ -547,7 +476,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.reverse(true)
.shallow(true)
@@ -568,7 +497,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.reverse(true)
@@ -590,7 +519,7 @@ mod tests {
{
let list = client
.list(url)
.list(&url)
.unwrap()
.shallow(true)
.reverse(true)
@@ -613,10 +542,10 @@ mod tests {
#[tokio::test]
async fn list_events() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -638,23 +567,19 @@ mod tests {
];
for url in urls {
client.put(url.as_str(), &[0]).await.unwrap();
client.delete(url.as_str()).await.unwrap();
client.put(&url).body(vec![0]).send().await.unwrap();
client.delete(url).send().await.unwrap();
}
let feed_url = format!("http://localhost:{}/events/", server.port());
let feed_url = feed_url.as_str();
let feed_url = format!("https://{}/events/", server.public_key());
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let cursor;
{
let response = client
.request(
Method::GET,
format!("{feed_url}?limit=10").as_str().try_into().unwrap(),
)
.request(Method::GET, format!("{feed_url}?limit=10"))
.send()
.await
.unwrap();
@@ -684,13 +609,7 @@ mod tests {
{
let response = client
.request(
Method::GET,
format!("{feed_url}?limit=10&cursor={cursor}")
.as_str()
.try_into()
.unwrap(),
)
.request(Method::GET, format!("{feed_url}?limit=10&cursor={cursor}"))
.send()
.await
.unwrap();
@@ -719,10 +638,10 @@ mod tests {
#[tokio::test]
async fn read_after_event() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -732,19 +651,15 @@ mod tests {
let url = format!("pubky://{pubky}/pub/a.com/a.txt");
client.put(url.as_str(), &[0]).await.unwrap();
client.put(&url).body(vec![0]).send().await.unwrap();
let feed_url = format!("http://localhost:{}/events/", server.port());
let feed_url = feed_url.as_str();
let feed_url = format!("https://{}/events/", server.public_key());
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
{
let response = client
.request(
Method::GET,
format!("{feed_url}?limit=10").as_str().try_into().unwrap(),
)
.request(Method::GET, format!("{feed_url}?limit=10"))
.send()
.await
.unwrap();
@@ -763,16 +678,19 @@ mod tests {
);
}
let resolved = client.get(url.as_str()).await.unwrap().unwrap();
let response = client.get(url).send().await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
assert_eq!(&resolved[..], &[0]);
let body = response.bytes().await.unwrap();
assert_eq!(body.as_ref(), &[0]);
}
#[tokio::test]
async fn dont_delete_shared_blobs() {
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let homeserver = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let homeserver_pubky = homeserver.public_key();
@@ -789,22 +707,37 @@ mod tests {
let url_2 = format!("pubky://{user_2_id}/pub/pubky.app/file/file_1");
let file = vec![1];
client.put(url_1.as_str(), &file).await.unwrap();
client.put(url_2.as_str(), &file).await.unwrap();
client.put(&url_1).body(file.clone()).send().await.unwrap();
client.put(&url_2).body(file.clone()).send().await.unwrap();
// Delete file 1
client.delete(url_1.as_str()).await.unwrap();
client
.delete(url_1)
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let blob = client.get(url_2.as_str()).await.unwrap().unwrap();
let blob = client
.get(url_2)
.send()
.await
.unwrap()
.bytes()
.await
.unwrap();
assert_eq!(blob, file);
let feed_url = format!("http://localhost:{}/events/", homeserver.port());
let feed_url = format!("https://{}/events/", homeserver.public_key());
let response = client
.request(Method::GET, feed_url.as_str().try_into().unwrap())
.request(Method::GET, feed_url)
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let text = response.text().await.unwrap();
@@ -818,17 +751,17 @@ mod tests {
format!("DEL pubky://{user_1_id}/pub/pubky.app/file/file_1",),
lines.last().unwrap().to_string()
]
)
);
}
#[tokio::test]
async fn stream() {
// TODO: test better streaming API
let testnet = Testnet::new(10);
let testnet = Testnet::new(10).unwrap();
let server = Homeserver::start_test(&testnet).await.unwrap();
let client = PubkyClient::test(&testnet);
let client = Client::test(&testnet);
let keypair = Keypair::random();
@@ -839,16 +772,16 @@ mod tests {
let bytes = Bytes::from(vec![0; 1024 * 1024]);
client.put(url, &bytes).await.unwrap();
client.put(url).body(bytes.clone()).send().await.unwrap();
let response = client.get(url).await.unwrap().unwrap();
let response = client.get(url).send().await.unwrap().bytes().await.unwrap();
assert_eq!(response, bytes);
client.delete(url).await.unwrap();
client.delete(url).send().await.unwrap();
let response = client.get(url).await.unwrap();
let response = client.get(url).send().await.unwrap();
assert_eq!(response, None);
assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
}
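
For orientation, the flow the `stream` test exercises above can also be driven from JavaScript through the wasm `Client.fetch` binding added later in this diff. This is only a hedged sketch: the public key is a placeholder, and it assumes the keypair behind it has already signed up on a homeserver.

```js
import { Client } from "@synonymdev/pubky";

const client = Client.testnet();

// Placeholder z-base-32 public key of an account that already signed up.
const pubky = "o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy";
const url = `pubky://${pubky}/pub/example.com/arbitrary`;

// PUT a binary body, read it back, delete it, then expect a 404, mirroring the test above.
await client.fetch(url, { method: "PUT", body: new Uint8Array(1024), credentials: "include" });

let response = await client.fetch(url);
console.log(response.status, (await response.arrayBuffer()).byteLength); // expected: 200 1024

await client.fetch(url, { method: "DELETE", credentials: "include" });

response = await client.fetch(url);
console.log(response.status); // expected: 404
```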

View File

@@ -1,250 +1,61 @@
use std::{
collections::HashSet,
sync::{Arc, RwLock},
};
use js_sys::{Array, Uint8Array};
use wasm_bindgen::prelude::*;
use url::Url;
use pubky_common::capabilities::Capabilities;
use crate::error::Error;
use crate::PubkyClient;
use crate::Client;
mod api;
mod http;
mod keys;
mod pkarr;
mod recovery_file;
mod session;
mod wrappers;
use keys::{Keypair, PublicKey};
use session::Session;
impl Default for PubkyClient {
impl Default for Client {
fn default() -> Self {
Self::new()
}
}
static DEFAULT_RELAYS: [&str; 1] = ["https://relay.pkarr.org"];
static TESTNET_RELAYS: [&str; 1] = ["http://localhost:15411/pkarr"];
static TESTNET_RELAYS: [&str; 1] = ["http://localhost:15411/"];
#[wasm_bindgen]
impl PubkyClient {
impl Client {
#[wasm_bindgen(constructor)]
/// Create a Client with default settings, including the default relays
pub fn new() -> Self {
Self {
http: reqwest::Client::builder().build().unwrap(),
session_cookies: Arc::new(RwLock::new(HashSet::new())),
pkarr_relays: DEFAULT_RELAYS.into_iter().map(|s| s.to_string()).collect(),
pkarr: pkarr::Client::builder().build().unwrap(),
testnet: false,
}
}
/// Create a client with configurations appropriate for local testing:
/// - set Pkarr relays to `["http://localhost:15411/pkarr"]` instead of default relay.
/// - set Pkarr relays to `["http://localhost:15411"]` instead of default relay.
/// - transform `pubky://<pkarr public key>` to `http://<pkarr public key>` instead of `https://`
/// and read the homeserver HTTP port from the [reserved service parameter key](pubky_common::constants::reserved_param_keys::HTTP_PORT)
#[wasm_bindgen]
pub fn testnet() -> Self {
Self {
http: reqwest::Client::builder().build().unwrap(),
session_cookies: Arc::new(RwLock::new(HashSet::new())),
pkarr_relays: TESTNET_RELAYS.into_iter().map(|s| s.to_string()).collect(),
pkarr: pkarr::Client::builder()
.relays(&TESTNET_RELAYS)
.expect("testnet relays are valid urls")
.build()
.unwrap(),
testnet: true,
}
}
/// Set Pkarr relays used for publishing and resolving Pkarr packets.
///
/// By default, [PubkyClient] will use `["https://relay.pkarr.org"]`
#[wasm_bindgen(js_name = "setPkarrRelays")]
pub fn set_pkarr_relays(mut self, relays: Vec<String>) -> Self {
self.pkarr_relays = relays;
self
}
// Read the set of pkarr relays used by this client.
#[wasm_bindgen(js_name = "getPkarrRelays")]
pub fn get_pkarr_relays(&self) -> Vec<String> {
self.pkarr_relays.clone()
}
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
#[wasm_bindgen]
pub async fn signup(
&self,
keypair: &Keypair,
homeserver: &PublicKey,
) -> Result<Session, JsValue> {
Ok(Session(
self.inner_signup(keypair.as_inner(), homeserver.as_inner())
.await
.map_err(JsValue::from)?,
))
}
/// Check the current session for a given Pubky in its homeserver.
///
/// Returns [Session] or `None` (if received `404 NOT_FOUND`),
/// or throws the received error if the response has any other `>=400` status code.
#[wasm_bindgen]
pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>, JsValue> {
self.inner_session(pubky.as_inner())
.await
.map(|s| s.map(Session))
.map_err(|e| e.into())
}
/// Signout from a homeserver.
#[wasm_bindgen]
pub async fn signout(&self, pubky: &PublicKey) -> Result<(), JsValue> {
self.inner_signout(pubky.as_inner())
.await
.map_err(|e| e.into())
}
/// Signin to a homeserver using the root Keypair.
#[wasm_bindgen]
pub async fn signin(&self, keypair: &Keypair) -> Result<(), JsValue> {
self.inner_signin(keypair.as_inner())
.await
.map(|_| ())
.map_err(|e| e.into())
}
/// Return `pubkyauth://` url and wait for the incoming [AuthToken]
/// verifying that AuthToken, and if capabilities were requested, signing in to
/// the Pubky's homeserver and returning the [Session] information.
///
/// Returns a tuple of [pubkyAuthUrl, Promise<PublicKey>]
#[wasm_bindgen(js_name = "authRequest")]
pub fn auth_request(&self, relay: &str, capabilities: &str) -> Result<js_sys::Array, JsValue> {
let mut relay: Url = relay
.try_into()
.map_err(|_| Error::Generic("Invalid relay Url".into()))?;
let (pubkyauth_url, client_secret) = self.create_auth_request(
&mut relay,
&Capabilities::try_from(capabilities).map_err(|_| "Invalid capabilities")?,
)?;
let this = self.clone();
let future = async move {
this.subscribe_to_auth_response(relay, &client_secret)
.await
.map(|pubky| JsValue::from(PublicKey(pubky)))
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))
};
let promise = wasm_bindgen_futures::future_to_promise(future);
// Return the URL and the promise
let js_tuple = js_sys::Array::new();
js_tuple.push(&JsValue::from_str(pubkyauth_url.as_ref()));
js_tuple.push(&promise);
Ok(js_tuple)
}
/// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
/// source of the pubkyauth request url.
#[wasm_bindgen(js_name = "sendAuthToken")]
pub async fn send_auth_token(
&self,
keypair: &Keypair,
pubkyauth_url: &str,
) -> Result<(), JsValue> {
let pubkyauth_url: Url = pubkyauth_url
.try_into()
.map_err(|_| Error::Generic("Invalid pubkyauth Url".into()))?;
self.inner_send_auth_token(keypair.as_inner(), pubkyauth_url)
.await?;
Ok(())
}
// === Public data ===
#[wasm_bindgen]
/// Upload a small payload to a given path.
pub async fn put(&self, url: &str, content: &[u8]) -> Result<(), JsValue> {
self.inner_put(url, content).await.map_err(|e| e.into())
}
/// Download a small payload from a given path relative to a pubky author.
#[wasm_bindgen]
pub async fn get(&self, url: &str) -> Result<Option<Uint8Array>, JsValue> {
self.inner_get(url)
.await
.map(|b| b.map(|b| (&*b).into()))
.map_err(|e| e.into())
}
/// Delete a file at a path relative to a pubky author.
#[wasm_bindgen]
pub async fn delete(&self, url: &str) -> Result<(), JsValue> {
self.inner_delete(url).await.map_err(|e| e.into())
}
/// Returns a list of Pubky urls (as strings).
///
/// - `url`: The Pubky url (string) of the directory whose contents you want to list.
/// - `cursor`: Either a full `pubky://` Url (from a previous list response),
/// or a path (to a file or directory) relative to the `url`
/// - `reverse`: List in reverse order
/// - `limit`: Limit the number of urls in the response
/// - `shallow`: List directories and files, instead of a flat list of files.
#[wasm_bindgen]
pub async fn list(
&self,
url: &str,
cursor: Option<String>,
reverse: Option<bool>,
limit: Option<u16>,
shallow: Option<bool>,
) -> Result<Array, JsValue> {
// TODO: try later to return Vec<String> from async function.
if let Some(cursor) = cursor {
return self
.inner_list(url)?
.reverse(reverse.unwrap_or(false))
.limit(limit.unwrap_or(u16::MAX))
.cursor(&cursor)
.shallow(shallow.unwrap_or(false))
.send()
.await
.map(|urls| {
let js_array = Array::new();
for url in urls {
js_array.push(&JsValue::from_str(&url));
}
js_array
})
.map_err(|e| e.into());
}
self.inner_list(url)?
.reverse(reverse.unwrap_or(false))
.limit(limit.unwrap_or(u16::MAX))
.shallow(shallow.unwrap_or(false))
.send()
.await
.map(|urls| {
let js_array = Array::new();
for url in urls {
js_array.push(&JsValue::from_str(&url));
}
js_array
})
.map_err(|e| e.into())
}
}
#[wasm_bindgen(js_name = "setLogLevel")]
pub fn set_log_level(level: &str) -> Result<(), JsValue> {
let level = match level.to_lowercase().as_str() {
"error" => log::Level::Error,
"warn" => log::Level::Warn,
"info" => log::Level::Info,
"debug" => log::Level::Debug,
"trace" => log::Level::Trace,
_ => return Err(JsValue::from_str("Invalid log level")),
};
console_log::init_with_level(level).map_err(|e| JsValue::from_str(&e.to_string()))?;
log::info!("Log level set to: {}", level);
Ok(())
}
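
A minimal usage sketch for the log-level export above, assuming the generated package re-exports the function under its `js_name`:

```js
import { setLogLevel } from "@synonymdev/pubky";

// Accepted levels are "error", "warn", "info", "debug" and "trace"; anything else throws.
setLogLevel("debug");
```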

113
pubky/src/wasm/api/auth.rs Normal file
View File

@@ -0,0 +1,113 @@
//! Wasm bindings for the Auth api
use url::Url;
use pubky_common::capabilities::Capabilities;
use crate::Client;
use crate::wasm::wrappers::keys::{Keypair, PublicKey};
use crate::wasm::wrappers::session::Session;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
impl Client {
/// Signup to a homeserver and update Pkarr accordingly.
///
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
#[wasm_bindgen]
pub async fn signup(
&self,
keypair: &Keypair,
homeserver: &PublicKey,
) -> Result<Session, JsValue> {
Ok(Session(
self.inner_signup(keypair.as_inner(), homeserver.as_inner())
.await
.map_err(|e| JsValue::from_str(&e.to_string()))?,
))
}
/// Check the current session for a given Pubky in its homeserver.
///
/// Returns [Session] or `None` (if received `404 NOT_FOUND`),
/// or throws the received error if the response has any other `>=400` status code.
#[wasm_bindgen]
pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>, JsValue> {
self.inner_session(pubky.as_inner())
.await
.map(|s| s.map(Session))
.map_err(|e| JsValue::from_str(&e.to_string()))
}
/// Signout from a homeserver.
#[wasm_bindgen]
pub async fn signout(&self, pubky: &PublicKey) -> Result<(), JsValue> {
self.inner_signout(pubky.as_inner())
.await
.map_err(|e| JsValue::from_str(&e.to_string()))
}
/// Signin to a homeserver using the root Keypair.
#[wasm_bindgen]
pub async fn signin(&self, keypair: &Keypair) -> Result<(), JsValue> {
self.inner_signin(keypair.as_inner())
.await
.map(|_| ())
.map_err(|e| JsValue::from_str(&e.to_string()))
}
/// Return `pubkyauth://` url and wait for the incoming [AuthToken]
/// verifying that AuthToken, and if capabilities were requested, signing in to
/// the Pubky's homeserver and returning the [Session] information.
///
/// Returns a tuple of [pubkyAuthUrl, Promise<PublicKey>]
#[wasm_bindgen(js_name = "authRequest")]
pub fn auth_request(&self, relay: &str, capabilities: &str) -> Result<js_sys::Array, JsValue> {
let mut relay: Url = relay.try_into().map_err(|_| "Invalid relay Url")?;
let (pubkyauth_url, client_secret) = self
.create_auth_request(
&mut relay,
&Capabilities::try_from(capabilities).map_err(|_| "Invalid capabilities")?,
)
.map_err(|e| JsValue::from_str(&e.to_string()))?;
let this = self.clone();
let future = async move {
this.subscribe_to_auth_response(relay, &client_secret)
.await
.map(|pubky| JsValue::from(PublicKey(pubky)))
.map_err(|e| JsValue::from_str(&e.to_string()))
};
let promise = wasm_bindgen_futures::future_to_promise(future);
// Return the URL and the promise
let js_tuple = js_sys::Array::new();
js_tuple.push(&JsValue::from_str(pubkyauth_url.as_ref()));
js_tuple.push(&promise);
Ok(js_tuple)
}
/// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
/// source of the pubkyauth request url.
#[wasm_bindgen(js_name = "sendAuthToken")]
pub async fn send_auth_token(
&self,
keypair: &Keypair,
pubkyauth_url: &str,
) -> Result<(), JsValue> {
let pubkyauth_url: Url = pubkyauth_url.try_into().map_err(|_| "Invalid pubkyauth Url")?;
self.inner_send_auth_token(keypair.as_inner(), pubkyauth_url)
.await
.map_err(|e| JsValue::from_str(&e.to_string()))?;
Ok(())
}
}
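
To tie the two halves of this API together, here is a hedged JavaScript sketch of the round trip. The relay URL and capability string are placeholder values, and `Keypair.random()` is assumed from the keys wrapper, which is not shown in this hunk.

```js
import { Client, Keypair } from "@synonymdev/pubky";

const client = new Client();

// App side: build a pubkyauth:// URL and a promise that resolves once the user responds.
// "https://demo.httprelay.io/link" is a placeholder HTTP relay.
const [pubkyauthUrl, authPromise] = client.authRequest(
  "https://demo.httprelay.io/link",
  "/pub/pubky.app/:rw", // example capability string
);
// Display `pubkyauthUrl` to the user, e.g. as a QR code.

// Authenticator side: sign an AuthToken for that URL and send it through the relay.
const keypair = Keypair.random(); // assumed helper on the Keypair wrapper
await client.sendAuthToken(keypair, pubkyauthUrl);

// Back on the app side: resolves to the authorizing user's PublicKey.
const publicKey = await authPromise;
console.log("Authorized pubky:", publicKey);
```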

View File

@@ -0,0 +1,5 @@
pub mod recovery_file;
// TODO: put the Homeserver API behind a feature flag
pub mod auth;
pub mod public;

View File

@@ -0,0 +1,69 @@
//! Wasm bindings for the /pub/ api
use js_sys::Array;
use wasm_bindgen::prelude::*;
use crate::Client;
#[wasm_bindgen]
impl Client {
/// Returns a list of Pubky urls (as strings).
///
/// - `url`: The Pubky url (string) of the directory whose contents you want to list.
/// - `cursor`: Either a full `pubky://` Url (from a previous list response),
/// or a path (to a file or directory) relative to the `url`
/// - `reverse`: List in reverse order
/// - `limit`: Limit the number of urls in the response
/// - `shallow`: List directories and files, instead of a flat list of files.
#[wasm_bindgen]
pub async fn list(
&self,
url: &str,
cursor: Option<String>,
reverse: Option<bool>,
limit: Option<u16>,
shallow: Option<bool>,
) -> Result<Array, JsValue> {
// TODO: try later to return Vec<String> from async function.
if let Some(cursor) = cursor {
return self
.inner_list(url)
.map_err(|e| JsValue::from_str(&e.to_string()))?
.reverse(reverse.unwrap_or(false))
.limit(limit.unwrap_or(u16::MAX))
.cursor(&cursor)
.shallow(shallow.unwrap_or(false))
.send()
.await
.map(|urls| {
let js_array = Array::new();
for url in urls {
js_array.push(&JsValue::from_str(&url));
}
js_array
})
.map_err(|e| JsValue::from_str(&e.to_string()));
}
self.inner_list(url)
.map_err(|e| JsValue::from_str(&e.to_string()))?
.reverse(reverse.unwrap_or(false))
.limit(limit.unwrap_or(u16::MAX))
.shallow(shallow.unwrap_or(false))
.send()
.await
.map(|urls| {
let js_array = Array::new();
for url in urls {
js_array.push(&JsValue::from_str(&url));
}
js_array
})
.map_err(|e| JsValue::from_str(&e.to_string()))
}
}
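
As a usage sketch for the binding above, with the argument order taken directly from the signature (the client and public key are placeholders):

```js
import { Client } from "@synonymdev/pubky";

const client = new Client();
const pubky = "o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"; // placeholder

// List the contents of a directory: flat, in reverse order, 20 urls at a time.
const urls = await client.list(
  `pubky://${pubky}/pub/example.com/`, // url of the directory to list
  undefined,                           // cursor: pass a url/path from a previous page to resume
  true,                                // reverse
  20,                                  // limit
  false,                               // shallow: false keeps the flat list of files
);

for (const url of urls) {
  console.log(url);
}
```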

View File

@@ -1,9 +1,7 @@
use js_sys::Uint8Array;
use wasm_bindgen::prelude::{wasm_bindgen, JsValue};
use crate::error::Error;
use super::keys::Keypair;
use crate::wasm::wrappers::keys::Keypair;
/// Create a recovery file of the `keypair`, containing the secret key encrypted
/// using the `passphrase`.
@@ -11,7 +9,7 @@ use super::keys::Keypair;
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Uint8Array, JsValue> {
pubky_common::recovery_file::create_recovery_file(keypair.as_inner(), passphrase)
.map(|b| b.as_slice().into())
.map_err(|e| Error::from(e).into())
.map_err(|e| JsValue::from_str(&e.to_string()))
}
/// Decrypt a recovery file and recover the `keypair` from the secret key encrypted
@@ -20,5 +18,5 @@ pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Uint8
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair, JsValue> {
pubky_common::recovery_file::decrypt_recovery_file(recovery_file, passphrase)
.map(Keypair::from)
.map_err(|e| Error::from(e).into())
.map_err(|e| JsValue::from_str(&e.to_string()))
}

View File

@@ -1,40 +1,169 @@
use crate::PubkyClient;
//! Fetch method handling HTTP and Pubky urls with Pkarr TLD.
use reqwest::{Method, RequestBuilder, Response};
use url::Url;
use js_sys::Promise;
use wasm_bindgen::prelude::*;
use web_sys::{Headers, RequestInit};
impl PubkyClient {
pub(crate) fn request(&self, method: Method, url: Url) -> RequestBuilder {
let mut request = self.http.request(method, url).fetch_credentials_include();
use reqwest::{IntoUrl, Method, RequestBuilder, Url};
for cookie in self.session_cookies.read().unwrap().iter() {
request = request.header("Cookie", cookie);
use futures_lite::StreamExt;
use pkarr::extra::endpoints::Endpoint;
use pkarr::PublicKey;
use crate::Client;
#[wasm_bindgen]
impl Client {
#[wasm_bindgen]
pub async fn fetch(
&self,
url: &str,
request_init: Option<RequestInit>,
) -> Result<js_sys::Promise, JsValue> {
let mut url: Url = url.try_into().map_err(|err| {
JsValue::from_str(&format!("pubky::Client::fetch(): Invalid `url`; {:?}", err))
})?;
let request_init = request_init.unwrap_or_default();
if let Some(pubky_host) = self.prepare_request(&mut url).await {
let headers = request_init.get_headers();
let headers = if headers.is_null() || headers.is_undefined() {
Headers::new()?
} else {
Headers::from(headers)
};
headers.append("pubky-host", &pubky_host)?;
request_init.set_headers(&headers.into());
}
request
}
let js_req = web_sys::Request::new_with_str_and_init(url.as_str(), &request_init).map_err(
|err| {
JsValue::from_str(&format!(
"pubky::Client::fetch(): Invalid `init`; {:?}",
err
))
},
)?;
// Support cookies for nodejs
pub(crate) fn store_session(&self, response: &Response) {
if let Some(cookie) = response
.headers()
.get("set-cookie")
.and_then(|h| h.to_str().ok())
.and_then(|s| s.split(';').next())
{
self.session_cookies
.write()
.unwrap()
.insert(cookie.to_string());
}
}
pub(crate) fn remove_session(&self, pubky: &pkarr::PublicKey) {
let key = pubky.to_string();
self.session_cookies
.write()
.unwrap()
.retain(|cookie| !cookie.starts_with(&key));
Ok(js_fetch(&js_req))
}
}
#[wasm_bindgen]
extern "C" {
#[wasm_bindgen(js_name = fetch)]
fn fetch_with_request(input: &web_sys::Request) -> Promise;
}
fn js_fetch(req: &web_sys::Request) -> Promise {
use wasm_bindgen::{JsCast, JsValue};
let global = js_sys::global();
if let Ok(true) = js_sys::Reflect::has(&global, &JsValue::from_str("ServiceWorkerGlobalScope"))
{
global
.unchecked_into::<web_sys::ServiceWorkerGlobalScope>()
.fetch_with_request(req)
} else {
// browser
fetch_with_request(req)
}
}
impl Client {
/// A wrapper around [reqwest::Client::request], with the same signature between native and wasm.
pub(crate) async fn inner_request<T: IntoUrl>(&self, method: Method, url: T) -> RequestBuilder {
let original_url = url.as_str();
let mut url = Url::parse(original_url).expect("Invalid url in inner_request");
if let Some(pubky_host) = self.prepare_request(&mut url).await {
self.http
.request(method, url.clone())
.header::<&str, &str>("pubky-host", &pubky_host)
.fetch_credentials_include()
} else {
self.http
.request(method, url.clone())
.fetch_credentials_include()
}
}
/// - Transforms pubky:// url to http(s):// urls
/// - Resolves a clearnet host to call with fetch
/// - Returns the `pubky-host` value if available
pub(super) async fn prepare_request(&self, url: &mut Url) -> Option<String> {
let host = url.host_str().unwrap_or("").to_string();
if url.scheme() == "pubky" {
*url = Url::parse(&format!("https{}", &url.as_str()[5..]))
.expect("couldn't replace pubky:// with https://");
url.set_host(Some(&format!("_pubky.{}", url.host_str().unwrap_or(""))))
.expect("couldn't map pubk://<pubky> to https://_pubky.<pubky>");
}
let mut pubky_host = None;
if PublicKey::try_from(host.clone()).is_ok() {
self.transform_url(url).await;
pubky_host = Some(host);
};
pubky_host
}
pub async fn transform_url(&self, url: &mut Url) {
let clone = url.clone();
let qname = clone.host_str().unwrap_or("");
log::debug!("Prepare request {}", url.as_str());
let mut stream = self.pkarr.resolve_https_endpoints(qname);
let mut so_far: Option<Endpoint> = None;
while let Some(endpoint) = stream.next().await {
if endpoint.domain().is_some() {
so_far = Some(endpoint);
// TODO: currently we return the first thing we can see,
// in the future we might want to failover to other endpoints
break;
}
}
if let Some(e) = so_far {
// TODO: detect loopback IPs and other equivalents of localhost
if self.testnet && e.domain() == Some("localhost") {
url.set_scheme("http")
.expect("couldn't replace pubky:// with http://");
let http_port = e
.get_param(pubky_common::constants::reserved_param_keys::HTTP_PORT)
.and_then(|x| <[u8; 2]>::try_from(x).ok())
.map(u16::from_be_bytes)
.expect("could not find HTTP_PORT service param");
url.set_port(Some(http_port))
.expect("coultdn't use the resolved endpoint's port");
} else if let Some(port) = e.port() {
url.set_port(Some(port))
.expect("coultdn't use the resolved endpoint's port");
}
if let Some(domain) = e.domain() {
url.set_host(Some(domain))
.expect("coultdn't use the resolved endpoint's domain");
}
log::debug!("Transformed URL to: {}", url.as_str());
} else {
// TODO: didn't find any domain, what to do?
// return an error.
log::debug!("Could not resolve host: {}", qname);
}
}
}
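
To illustrate what the URL preparation above does from the caller's point of view, a hedged sketch with a placeholder key and path; the second argument follows the standard RequestInit shape:

```js
import { Client } from "@synonymdev/pubky";

const client = new Client();

// Internally `pubky://<pk>/...` becomes `https://_pubky.<pk>/...`, the host is
// resolved to an HTTPS endpoint via Pkarr, and a `pubky-host` header is attached.
const response = await client.fetch(
  "pubky://o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy/pub/example.com/hello.txt",
  { method: "GET", credentials: "include" },
);

console.log(response.status, await response.text());
```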

View File

@@ -1,48 +0,0 @@
use reqwest::StatusCode;
pub use pkarr::{PublicKey, SignedPacket};
use crate::error::Result;
use crate::PubkyClient;
// TODO: Add an in memory cache of packets
impl PubkyClient {
//TODO: migrate to pkarr::PkarrRelayClient
pub(crate) async fn pkarr_resolve(
&self,
public_key: &PublicKey,
) -> Result<Option<SignedPacket>> {
//TODO: Allow multiple relays in parallel
let relay = self.pkarr_relays.first().expect("initialized with relays");
let res = self
.http
.get(format!("{relay}/{}", public_key))
.send()
.await?;
if res.status() == StatusCode::NOT_FOUND {
return Ok(None);
};
// TODO: guard against too large responses.
let bytes = res.bytes().await?;
let existing = SignedPacket::from_relay_payload(public_key, &bytes)?;
Ok(Some(existing))
}
pub(crate) async fn pkarr_publish(&self, signed_packet: &SignedPacket) -> Result<()> {
let relay = self.pkarr_relays.first().expect("initialized with relays");
self.http
.put(format!("{relay}/{}", signed_packet.public_key()))
.body(signed_packet.to_relay_payload())
.send()
.await?;
Ok(())
}
}

View File

@@ -1,7 +1,5 @@
use wasm_bindgen::prelude::*;
use crate::Error;
#[wasm_bindgen]
pub struct Keypair(pkarr::Keypair);
@@ -80,9 +78,9 @@ impl PublicKey {
.as_string()
.ok_or("Couldn't create a PublicKey from this type of value")?;
Ok(PublicKey(
pkarr::PublicKey::try_from(string).map_err(Error::Pkarr)?,
))
Ok(PublicKey(pkarr::PublicKey::try_from(string).map_err(
|_| "Couldn't create a PublicKey from this type of value",
)?))
}
}

View File

@@ -0,0 +1,5 @@
//! Wasm wrappers around structs that we need to be turned into Classes
//! in JavaScript.
pub mod keys;
pub mod session;