Mirror of https://github.com/aljazceru/react-native-pubky.git (synced 2025-12-18 23:24:22 +01:00)
chore: upgrade pubky
Binary file not shown.
Binary file not shown.
@@ -16,7 +16,6 @@ class Pubky: NSObject {
@objc(parseAuthUrl:withResolver:withRejecter:)
func parseAuthUrl(_ url: String, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
Task {
do {
let result = react_native_pubky.parseAuthUrl(url: url)
resolve(result)
@@ -24,13 +23,12 @@ class Pubky: NSObject {
reject("parseAuthUrl Error", "Failed to parse auth url", error)
}
}
}

@objc(publish:recordContent:secretKey:withResolver:withRejecter:)
func publish(recordName: String, recordContent: String, secretKey: String, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
Task {
do {
let result = react_native_pubky.publish(recordName: recordName, recordContent: recordContent, secretKey: secretKey)
let result = try await react_native_pubky.publish(recordName: recordName, recordContent: recordContent, secretKey: secretKey)
resolve(result)
} catch {
reject("publish Error", "Failed to publish", error)
@@ -42,7 +40,7 @@ class Pubky: NSObject {
func resolve(publicKey: String, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
Task {
do {
let result = react_native_pubky.resolve(publicKey: publicKey)
let result = try await react_native_pubky.resolve(publicKey: publicKey)
resolve(result)
} catch {
reject("resolve Error", "Failed to resolve", error)

@@ -68,6 +68,10 @@ impl Session {
}

pub fn deserialize(bytes: &[u8]) -> Result<Self> {
if bytes.is_empty() {
return Err(Error::EmptyPayload);
}

if bytes[0] > 0 {
return Err(Error::UnknownVersion);
}
@@ -80,8 +84,10 @@ impl Session {

pub type Result<T> = core::result::Result<T, Error>;

#[derive(thiserror::Error, Debug)]
#[derive(thiserror::Error, Debug, PartialEq)]
pub enum Error {
#[error("Empty payload")]
EmptyPayload,
#[error("Unknown version")]
UnknownVersion,
#[error(transparent)]
@@ -123,4 +129,11 @@ mod tests {

assert_eq!(deseiralized, session)
}

#[test]
fn deserialize() {
let result = Session::deserialize(&[]);

assert_eq!(result, Err(Error::EmptyPayload));
}
}
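
A minimal sketch of how the new guards behave, assuming the Session and Error types from the hunk above (the test name is illustrative, not part of the commit):

#[test]
fn deserialize_rejects_bad_input() {
    // An empty byte slice is rejected before any parsing happens.
    assert_eq!(Session::deserialize(&[]), Err(Error::EmptyPayload));

    // A non-zero leading byte is treated as an unsupported serialization version.
    assert_eq!(Session::deserialize(&[1]), Err(Error::UnknownVersion));
}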
@@ -2,7 +2,7 @@

use anyhow::{anyhow, Context, Result};
use pkarr::Keypair;
use serde::{Deserialize, Deserializer, Serialize};
use serde::{Deserialize, Serialize};
use std::{
fmt::Debug,
path::{Path, PathBuf},
@@ -15,31 +15,92 @@ use pubky_common::timestamp::Timestamp;
const DEFAULT_HOMESERVER_PORT: u16 = 6287;
const DEFAULT_STORAGE_DIR: &str = "pubky";

/// Server configuration
#[derive(Serialize, Deserialize, Clone)]
pub struct Config {
testnet: bool,
pub const DEFAULT_LIST_LIMIT: u16 = 100;
pub const DEFAULT_MAX_LIST_LIMIT: u16 = 1000;

#[derive(Serialize, Deserialize, Clone, PartialEq)]
struct ConfigToml {
testnet: Option<bool>,
port: Option<u16>,
bootstrap: Option<Vec<String>>,
domain: String,
/// Path to the storage directory
domain: Option<String>,
storage: Option<PathBuf>,
secret_key: Option<String>,
dht_request_timeout: Option<Duration>,
default_list_limit: Option<u16>,
max_list_limit: Option<u16>,
}

/// Server configuration
#[derive(Debug, Clone)]
pub struct Config {
/// Whether or not this server is running in a testnet.
testnet: bool,
/// The configured port for this server.
port: Option<u16>,
/// Bootstrapping DHT nodes.
///
/// Helpful to run the server locally or in testnet.
bootstrap: Option<Vec<String>>,
/// A public domain for this server
/// necessary for web browsers running in https environment.
domain: Option<String>,
/// Path to the storage directory.
///
/// Defaults to a directory in the OS data directory
storage: Option<PathBuf>,
#[serde(deserialize_with = "secret_key_deserialize")]
secret_key: Option<[u8; 32]>,

storage: PathBuf,
/// Server keypair.
///
/// Defaults to a random keypair.
keypair: Keypair,
dht_request_timeout: Option<Duration>,
/// The default limit of a list api if no `limit` query parameter is provided.
///
/// Defaults to `100`
default_list_limit: u16,
/// The maximum limit of a list api, even if a `limit` query parameter is provided.
///
/// Defaults to `1000`
max_list_limit: u16,
}

impl Config {
/// Load the config from a file.
pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
let s = tokio::fs::read_to_string(path.as_ref())
.await
.with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;
fn try_from_str(value: &str) -> Result<Self> {
let config_toml: ConfigToml = toml::from_str(value)?;

let config: Config = toml::from_str(&s)?;
let keypair = if let Some(secret_key) = config_toml.secret_key {
let secret_key = deserialize_secret_key(secret_key)?;
Keypair::from_secret_key(&secret_key)
} else {
Keypair::random()
};

let storage = {
let dir = if let Some(storage) = config_toml.storage {
storage
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};

dir.join("homeserver")
};

let config = Config {
testnet: config_toml.testnet.unwrap_or(false),
port: config_toml.port,
bootstrap: config_toml.bootstrap,
domain: config_toml.domain,
keypair,
storage,
dht_request_timeout: config_toml.dht_request_timeout,
default_list_limit: config_toml.default_list_limit.unwrap_or(DEFAULT_LIST_LIMIT),
max_list_limit: config_toml
.max_list_limit
.unwrap_or(DEFAULT_MAX_LIST_LIMIT),
};

if config.testnet {
let testnet_config = Config::testnet();
@@ -53,17 +114,24 @@ impl Config {
Ok(config)
}

/// Load the config from a file.
pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
let s = tokio::fs::read_to_string(path.as_ref())
.await
.with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;

Config::try_from_str(&s)
}
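
A minimal sketch of the new TOML-driven configuration path, assuming the crate-private Config::try_from_str and the default_list_limit getter from this diff; the key values below are illustrative, only the key names come from ConfigToml above:

#[test]
fn parse_basic_config() {
    // Every ConfigToml field is optional, so only the keys we care about need to appear.
    let toml = r#"
        testnet = false
        port = 6287
        domain = "example.com"
        default_list_limit = 50
    "#;

    let config = Config::try_from_str(toml).unwrap();
    assert_eq!(config.default_list_limit(), 50);
}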

/// Testnet configurations
pub fn testnet() -> Self {
let testnet = pkarr::mainline::Testnet::new(10);
info!(?testnet.bootstrap, "Testnet bootstrap nodes");

let bootstrap = Some(testnet.bootstrap.to_owned());
let storage = Some(
std::env::temp_dir()
let storage = std::env::temp_dir()
.join(Timestamp::now().to_string())
.join(DEFAULT_STORAGE_DIR),
);
.join(DEFAULT_STORAGE_DIR);

Self {
bootstrap,
@@ -77,11 +145,9 @@ impl Config {
/// Test configurations
pub fn test(testnet: &pkarr::mainline::Testnet) -> Self {
let bootstrap = Some(testnet.bootstrap.to_owned());
let storage = Some(
std::env::temp_dir()
let storage = std::env::temp_dir()
.join(Timestamp::now().to_string())
.join(DEFAULT_STORAGE_DIR),
);
.join(DEFAULT_STORAGE_DIR);

Self {
bootstrap,
@@ -98,26 +164,25 @@ impl Config {
self.bootstrap.to_owned()
}

pub fn domain(&self) -> &str {
pub fn domain(&self) -> &Option<String> {
&self.domain
}

/// Get the path to the storage directory
pub fn storage(&self) -> Result<PathBuf> {
let dir = if let Some(storage) = &self.storage {
PathBuf::from(storage)
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};

Ok(dir.join("homeserver"))
pub fn keypair(&self) -> &Keypair {
&self.keypair
}

pub fn keypair(&self) -> Keypair {
Keypair::from_secret_key(&self.secret_key.unwrap_or_default())
pub fn default_list_limit(&self) -> u16 {
self.default_list_limit
}

pub fn max_list_limit(&self) -> u16 {
self.max_list_limit
}

/// Get the path to the storage directory
pub fn storage(&self) -> &PathBuf {
&self.storage
}

pub(crate) fn dht_request_timeout(&self) -> Option<Duration> {
@@ -131,43 +196,53 @@ impl Default for Config {
testnet: false,
port: Some(0),
bootstrap: None,
domain: "localhost".to_string(),
storage: None,
secret_key: None,
domain: None,
storage: storage(None)
.expect("operating environment provides no directory for application data"),
keypair: Keypair::random(),
dht_request_timeout: None,
default_list_limit: DEFAULT_LIST_LIMIT,
max_list_limit: DEFAULT_MAX_LIST_LIMIT,
}
}
}

fn secret_key_deserialize<'de, D>(deserializer: D) -> Result<Option<[u8; 32]>, D::Error>
where
D: Deserializer<'de>,
{
let opt: Option<String> = Option::deserialize(deserializer)?;

match opt {
Some(s) => {
let bytes = hex::decode(s).map_err(serde::de::Error::custom)?;
fn deserialize_secret_key(s: String) -> anyhow::Result<[u8; 32]> {
let bytes =
hex::decode(s).map_err(|_| anyhow!("secret_key in config.toml should be hex encoded"))?;

if bytes.len() != 32 {
return Err(serde::de::Error::custom("Expected a 32-byte array"));
return Err(anyhow!(format!(
"secret_key in config.toml should be 32 bytes in hex (64 characters), got: {}",
bytes.len()
)));
}

let mut arr = [0u8; 32];
arr.copy_from_slice(&bytes);
Ok(Some(arr))
}
None => Ok(None),
}

Ok(arr)
}

impl Debug for Config {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_map()
.entry(&"testnet", &self.testnet)
.entry(&"port", &self.port())
.entry(&"storage", &self.storage())
.entry(&"public_key", &self.keypair().public_key())
.finish()
fn storage(storage: Option<String>) -> Result<PathBuf> {
let dir = if let Some(storage) = storage {
PathBuf::from(storage)
} else {
let path = dirs_next::data_dir().ok_or_else(|| {
anyhow!("operating environment provides no directory for application data")
})?;
path.join(DEFAULT_STORAGE_DIR)
};

Ok(dir.join("homeserver"))
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn parse_empty() {
Config::try_from_str("").unwrap();
}
}
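
A hedged sketch of what deserialize_secret_key accepts, assuming the helper introduced above (the hex values and test name are illustrative):

#[test]
fn secret_key_must_be_32_hex_encoded_bytes() {
    // 64 hex characters decode to exactly 32 bytes, which is what Keypair::from_secret_key expects.
    let ok = deserialize_secret_key("00".repeat(32)).unwrap();
    assert_eq!(ok, [0u8; 32]);

    // Anything that is not 64 hex characters is rejected with a descriptive error.
    assert!(deserialize_secret_key("abcd".to_string()).is_err());
}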
@@ -1,31 +1,42 @@
use std::fs;

use std::path::Path;

use heed::{Env, EnvOpenOptions};

mod migrations;
pub mod tables;

use crate::config::Config;

use tables::{Tables, TABLES_COUNT};

pub const MAX_LIST_LIMIT: u16 = 100;
pub const DEFAULT_MAP_SIZE: usize = 10995116277760; // 10TB (not = disk-space used)

#[derive(Debug, Clone)]
pub struct DB {
pub(crate) env: Env,
pub(crate) tables: Tables,
pub(crate) config: Config,
}

impl DB {
pub fn open(storage: &Path) -> anyhow::Result<Self> {
fs::create_dir_all(storage).unwrap();
pub fn open(config: Config) -> anyhow::Result<Self> {
fs::create_dir_all(config.storage())?;

let env = unsafe { EnvOpenOptions::new().max_dbs(TABLES_COUNT).open(storage) }?;
let env = unsafe {
EnvOpenOptions::new()
.max_dbs(TABLES_COUNT)
// TODO: Add a configuration option?
.map_size(DEFAULT_MAP_SIZE)
.open(config.storage())
}?;

let tables = migrations::run(&env)?;

let db = DB { env, tables };
let db = DB {
env,
tables,
config,
};

Ok(db)
}
@@ -34,18 +45,15 @@ impl DB {
#[cfg(test)]
mod tests {
use bytes::Bytes;
use pkarr::Keypair;
use pubky_common::timestamp::Timestamp;
use pkarr::{mainline::Testnet, Keypair};

use crate::config::Config;

use super::DB;

#[tokio::test]
async fn entries() {
let storage = std::env::temp_dir()
.join(Timestamp::now().to_string())
.join("pubky");

let db = DB::open(&storage).unwrap();
let db = DB::open(Config::test(&Testnet::new(0))).unwrap();

let keypair = Keypair::random();
let path = "/pub/foo.txt";

@@ -13,7 +13,7 @@ use pubky_common::{
timestamp::Timestamp,
};

use crate::database::{DB, MAX_LIST_LIMIT};
use crate::database::DB;

use super::events::Event;

@@ -157,7 +157,7 @@ impl DB {

/// Return a list of pubky urls.
///
/// - limit defaults to and capped by [MAX_LIST_LIMIT]
/// - limit defaults to [Config::default_list_limit] and capped by [Config::max_list_limit]
pub fn list(
&self,
txn: &RoTxn,
@@ -170,7 +170,9 @@ impl DB {
// Vector to store results
let mut results = Vec::new();

let limit = limit.unwrap_or(MAX_LIST_LIMIT).min(MAX_LIST_LIMIT);
let limit = limit
.unwrap_or(self.config.default_list_limit())
.min(self.config.max_list_limit());

// TODO: make this more performant than split and allocations?

@@ -10,6 +10,8 @@ use heed::{
use postcard::{from_bytes, to_allocvec};
use serde::{Deserialize, Serialize};

use crate::database::DB;

/// Event [Timestamp] base32 => Encoded event.
pub type EventsTable = Database<Str, Bytes>;

@@ -56,3 +58,48 @@ impl Event {
}
}
}

impl DB {
/// Returns a list of events formatted as `<OP> <url>`.
///
/// - limit defaults to [Config::default_list_limit] and capped by [Config::max_list_limit]
/// - cursor is a 13 character string encoding of a timestamp
pub fn list_events(
&self,
limit: Option<u16>,
cursor: Option<String>,
) -> anyhow::Result<Vec<String>> {
let txn = self.env.read_txn()?;

let limit = limit
.unwrap_or(self.config.default_list_limit())
.min(self.config.max_list_limit());

let cursor = cursor.unwrap_or("0000000000000".to_string());

let mut result: Vec<String> = vec![];
let mut next_cursor = cursor.to_string();

for _ in 0..limit {
match self.tables.events.get_greater_than(&txn, &next_cursor)? {
Some((timestamp, event_bytes)) => {
let event = Event::deserialize(event_bytes)?;

let line = format!("{} {}", event.operation(), event.url());
next_cursor = timestamp.to_string();

result.push(line);
}
None => break,
};
}

if !result.is_empty() {
result.push(format!("cursor: {next_cursor}"))
}

txn.commit()?;

Ok(result)
}
}
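
For context, a hedged sketch of the shape DB::list_events returns; the operation names, URLs, and cursor value below are illustrative, only the `<OP> <url>` lines and the trailing `cursor: ...` line come from the code above:

let lines = db.list_events(Some(2), None)?;
// e.g.:
//   PUT pubky://<public_key>/pub/example.txt
//   DEL pubky://<public_key>/pub/old.txt
//   cursor: 0032FN4X6NM00
// Passing the returned cursor back as `cursor` resumes listing after the last event.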
@@ -5,6 +5,7 @@ use axum::{
http::StatusCode,
response::IntoResponse,
};
use tracing::debug;

pub type Result<T, E = Error> = core::result::Result<T, E>;

@@ -86,36 +87,42 @@ impl From<pkarr::Error> for Error {

impl From<std::io::Error> for Error {
fn from(error: std::io::Error) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}

impl From<heed::Error> for Error {
fn from(error: heed::Error) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}

impl From<anyhow::Error> for Error {
fn from(error: anyhow::Error) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}

impl From<postcard::Error> for Error {
fn from(error: postcard::Error) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}

impl From<axum::Error> for Error {
fn from(error: axum::Error) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}

impl<T> From<flume::SendError<T>> for Error {
fn from(error: flume::SendError<T>) -> Self {
debug!(?error);
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
}
}
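
These conversions exist so route handlers can bubble database and serialization failures up with `?`; a minimal sketch, assuming the AppState, Result, and extractor types used elsewhere in this diff (the handler name is hypothetical):

pub async fn example_handler(State(state): State<AppState>) -> Result<impl IntoResponse> {
    // heed::Error from the read transaction converts into a 500 response via the impl above.
    let txn = state.db.env.read_txn()?;
    txn.commit()?;

    Ok(StatusCode::OK)
}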
@@ -2,7 +2,7 @@ use std::collections::HashMap;

use axum::{
async_trait,
extract::{FromRequestParts, Path},
extract::{FromRequestParts, Path, Query},
http::{request::Parts, StatusCode},
response::{IntoResponse, Response},
RequestPartsExt,
@@ -74,3 +74,50 @@ where
Ok(EntryPath(path.to_string()))
}
}

#[derive(Debug)]
pub struct ListQueryParams {
pub limit: Option<u16>,
pub cursor: Option<String>,
pub reverse: bool,
pub shallow: bool,
}

#[async_trait]
impl<S> FromRequestParts<S> for ListQueryParams
where
S: Send + Sync,
{
type Rejection = Response;

async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
let params: Query<HashMap<String, String>> =
parts.extract().await.map_err(IntoResponse::into_response)?;

let reverse = params.contains_key("reverse");
let shallow = params.contains_key("shallow");
let limit = params
.get("limit")
// Treat `limit=` as None
.and_then(|l| if l.is_empty() { None } else { Some(l) })
.and_then(|l| l.parse::<u16>().ok());
let cursor = params
.get("cursor")
.map(|c| c.as_str())
// Treat `cursor=` as None
.and_then(|c| {
if c.is_empty() {
None
} else {
Some(c.to_string())
}
});

Ok(ListQueryParams {
reverse,
shallow,
limit,
cursor,
})
}
}
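
As a hedged illustration of the query strings this extractor accepts (the values are made up; the behavior follows the code above):

// ?limit=50&cursor=0032FN4X6NM00&reverse&shallow
//   -> ListQueryParams { limit: Some(50), cursor: Some("0032FN4X6NM00".into()), reverse: true, shallow: true }
//
// ?limit=&cursor=
//   -> ListQueryParams { limit: None, cursor: None, reverse: false, shallow: false }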
@@ -5,8 +5,8 @@ use pkarr::{
Keypair, PkarrClientAsync, SignedPacket,
};

pub async fn publish_server_packet(
pkarr_client: PkarrClientAsync,
pub(crate) async fn publish_server_packet(
pkarr_client: &PkarrClientAsync,
keypair: &Keypair,
domain: &str,
port: u16,

@@ -1,67 +1,37 @@
use std::collections::HashMap;

use axum::{
body::Body,
extract::{Query, State},
extract::State,
http::{header, Response, StatusCode},
response::IntoResponse,
};
use pubky_common::timestamp::{Timestamp, TimestampError};

use crate::{
database::{tables::events::Event, MAX_LIST_LIMIT},
error::Result,
error::{Error, Result},
extractors::ListQueryParams,
server::AppState,
};

pub async fn feed(
State(state): State<AppState>,
Query(params): Query<HashMap<String, String>>,
params: ListQueryParams,
) -> Result<impl IntoResponse> {
let txn = state.db.env.read_txn()?;

let limit = params
.get("limit")
.and_then(|l| l.parse::<u16>().ok())
.unwrap_or(MAX_LIST_LIMIT)
.min(MAX_LIST_LIMIT);

let mut cursor = params
.get("cursor")
.map(|c| c.as_str())
.unwrap_or("0000000000000");

// Guard against bad cursor
if cursor.len() < 13 {
cursor = "0000000000000"
if let Some(ref cursor) = params.cursor {
if let Err(timestamp_error) = Timestamp::try_from(cursor.to_string()) {
let cause = match timestamp_error {
TimestampError::InvalidEncoding => {
"Cursor should be valid base32 Crockford encoding of a timestamp"
}

let mut result: Vec<String> = vec![];
let mut next_cursor = cursor.to_string();

for _ in 0..limit {
match state
.db
.tables
.events
.get_greater_than(&txn, &next_cursor)?
{
Some((timestamp, event_bytes)) => {
let event = Event::deserialize(event_bytes)?;

let line = format!("{} {}", event.operation(), event.url());
next_cursor = timestamp.to_string();

result.push(line);
TimestampError::InvalidBytesLength(size) => {
&format!("Cursor should be 13 characters long, got: {size}")
}
None => break,
};

Err(Error::new(StatusCode::BAD_REQUEST, cause.into()))?
}
}

if !result.is_empty() {
result.push(format!("cursor: {next_cursor}"))
}

txn.commit()?;
let result = state.db.list_events(params.limit, params.cursor)?;

Ok(Response::builder()
.status(StatusCode::OK)

@@ -1,8 +1,6 @@
use std::collections::HashMap;

use axum::{
body::{Body, Bytes},
extract::{Query, State},
extract::State,
http::{header, Response, StatusCode},
response::IntoResponse,
};
@@ -12,7 +10,7 @@ use tower_cookies::Cookies;

use crate::{
error::{Error, Result},
extractors::{EntryPath, Pubky},
extractors::{EntryPath, ListQueryParams, Pubky},
server::AppState,
};

@@ -65,7 +63,7 @@ pub async fn get(
State(state): State<AppState>,
pubky: Pubky,
path: EntryPath,
Query(params): Query<HashMap<String, String>>,
params: ListQueryParams,
) -> Result<impl IntoResponse> {
verify(path.as_str())?;
let public_key = pubky.public_key();
@@ -88,10 +86,10 @@ pub async fn get(
let vec = state.db.list(
&txn,
&path,
params.contains_key("reverse"),
params.get("limit").and_then(|l| l.parse::<u16>().ok()),
params.get("cursor").map(|cursor| cursor.into()),
params.contains_key("shallow"),
params.reverse,
params.limit,
params.cursor,
params.shallow,
)?;

return Ok(Response::builder()

@@ -14,25 +14,24 @@ use crate::{config::Config, database::DB, pkarr::publish_server_packet};

#[derive(Debug)]
pub struct Homeserver {
port: u16,
config: Config,
state: AppState,
tasks: JoinSet<std::io::Result<()>>,
}

#[derive(Clone, Debug)]
pub(crate) struct AppState {
pub verifier: AuthVerifier,
pub db: DB,
pub pkarr_client: PkarrClientAsync,
pub(crate) verifier: AuthVerifier,
pub(crate) db: DB,
pub(crate) pkarr_client: PkarrClientAsync,
pub(crate) config: Config,
pub(crate) port: u16,
}

impl Homeserver {
pub async fn start(config: Config) -> Result<Self> {
debug!(?config);

let keypair = config.keypair();

let db = DB::open(&config.storage()?)?;
let db = DB::open(config.clone())?;

let pkarr_client = PkarrClient::new(Settings {
dht: DhtSettings {
@@ -44,22 +43,22 @@ impl Homeserver {
})?
.as_async();

let state = AppState {
verifier: AuthVerifier::default(),
db,
pkarr_client: pkarr_client.clone(),
};

let app = crate::routes::create_app(state);

let mut tasks = JoinSet::new();

let app = app.clone();

let listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], config.port()))).await?;

let port = listener.local_addr()?.port();

let state = AppState {
verifier: AuthVerifier::default(),
db,
pkarr_client,
config: config.clone(),
port,
};

let app = crate::routes::create_app(state.clone());

// Spawn http server task
tasks.spawn(
axum::serve(
@@ -72,15 +71,24 @@ impl Homeserver {

info!("Homeserver listening on http://localhost:{port}");

publish_server_packet(pkarr_client, &keypair, config.domain(), port).await?;

info!("Homeserver listening on pubky://{}", keypair.public_key());

Ok(Self {
tasks,
config,
publish_server_packet(
&state.pkarr_client,
config.keypair(),
&state
.config
.domain()
.clone()
.unwrap_or("localhost".to_string()),
port,
})
)
.await?;

info!(
"Homeserver listening on pubky://{}",
config.keypair().public_key()
);

Ok(Self { tasks, state })
}

/// Test version of [Homeserver::start], using mainline Testnet, and a temporary storage.
@@ -93,11 +101,11 @@ impl Homeserver {
// === Getters ===

pub fn port(&self) -> u16 {
self.port
self.state.port
}

pub fn public_key(&self) -> PublicKey {
self.config.keypair().public_key()
self.state.config.keypair().public_key()
}

// === Public Methods ===

@@ -11,7 +11,7 @@ use reqwest::{RequestBuilder, Response};
use tokio::sync::oneshot;
use url::Url;

use pkarr::Keypair;
use pkarr::{Keypair, PkarrClientAsync};

use ::pkarr::{mainline::dht::Testnet, PkarrClient, PublicKey, SignedPacket};

@@ -112,6 +112,13 @@ impl PubkyClient {
builder.build()
}

// === Getters ===

/// Returns a reference to the internal [pkarr] Client.
pub fn pkarr(&self) -> &PkarrClientAsync {
&self.pkarr
}

// === Auth ===

/// Signup to a homeserver and update Pkarr accordingly.

@@ -305,10 +305,9 @@ mod tests {
.unwrap();
}

let session = pubkyauth_response.await.unwrap().unwrap();
let public_key = pubkyauth_response.await.unwrap();

assert_eq!(session.pubky(), &pubky);
assert_eq!(session.capabilities(), &capabilities.0);
assert_eq!(&public_key, &pubky);

// Test access control enforcement

@@ -364,7 +364,6 @@ pub fn publish(record_name: String, record_content: String, secret_key: String)
}
}
}

#[uniffi::export]
pub fn list(url: String) -> Vec<String> {
let runtime = TOKIO_RUNTIME.clone();