Mirror of https://github.com/aljazceru/react-native-pubky.git (synced 2025-12-17 06:34:21 +01:00)
refactor: remove rust project
Removes rust directory containing the pubky library. Updates build scripts to include local and remote options. Updates README.md. Adds simple-git as a dev dependency.
README.md (14 lines changed)
@@ -309,12 +309,20 @@ cd rust/pubky/pubky-homeserver && cargo run -- --config=./src/config.toml
 cd example && yarn install && cd ios && pod install && cd ../ && yarn ios
 ```
 
-## Update Bindings
+## Download Remote Bindings
 
-After making changes to any of the Rust files, the bindings will need to be updated. To do this, run the following command:
+This command will download the current bindings from the [SDK repo](https://github.com/pubky/pubky-core-mobile-sdk):
 
 ```sh
-npm run update-bindings
+npm run update-remote-bindings
 ```
 
+## Setup Local Bindings
+
+This command will download the entire Rust project if it doesn't exist and set up the bindings locally for faster iteration and testing:
+
+```sh
+npm run update-local-bindings
+```
+
 Finally, ensure that `PubkyModule.kt`, `Pubky.swift`, `Pubky.mm` & `src/index.tsx` are updated accordingly based on the changes made to the Rust files.
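The README asks that `src/index.tsx` stay in sync with the Rust FFI surface, which in this commit exposes a single `auth(url, secret_key)` function (see the deleted `pubkymobileFFI.h` below). The actual `src/index.tsx` is not part of this diff, so the following is only a hedged TypeScript sketch of what such a wrapper typically looks like; the native module name and the return shape are assumptions, not taken from the repo.

```ts
// Hypothetical sketch of a src/index.tsx-style wrapper — not the actual file from this repo.
// Assumes the native module is registered as "Pubky" and that its `auth` call mirrors the
// `auth(url, secret_key)` function declared in pubkymobileFFI.h.
import { NativeModules, Platform } from 'react-native';

const LINKING_ERROR =
  `The package 'react-native-pubky' doesn't seem to be linked. Make sure:\n\n` +
  Platform.select({ ios: "- You have run 'pod install'\n", default: '' }) +
  '- You rebuilt the app after installing the package\n';

// Fall back to a proxy that throws a descriptive error if the native side is missing.
const Pubky = NativeModules.Pubky
  ? NativeModules.Pubky
  : new Proxy(
      {},
      {
        get() {
          throw new Error(LINKING_ERROR);
        },
      }
    );

/**
 * Ask the native layer to perform Pubky Auth against the given auth URL,
 * signing with the provided secret key (hex-encoded here by assumption).
 * The Promise's payload shape is assumed, not documented in this diff.
 */
export async function auth(url: string, secretKey: string): Promise<string[]> {
  return Pubky.auth(url, secretKey);
}
```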
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,188 +0,0 @@
// This file was autogenerated by some hot garbage in the `uniffi` crate.
// Trust me, you don't want to mess with it!

#pragma once

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

// The following structs are used to implement the lowest level
// of the FFI, and thus useful to multiple uniffied crates.
// We ensure they are declared exactly once, with a header guard, UNIFFI_SHARED_H.
#ifdef UNIFFI_SHARED_H
// We also try to prevent mixing versions of shared uniffi header structs.
// If you add anything to the #else block, you must increment the version suffix in UNIFFI_SHARED_HEADER_V4
#ifndef UNIFFI_SHARED_HEADER_V4
#error Combining helper code from multiple versions of uniffi is not supported
#endif // ndef UNIFFI_SHARED_HEADER_V4
#else
#define UNIFFI_SHARED_H
#define UNIFFI_SHARED_HEADER_V4
// ⚠️ Attention: If you change this #else block (ending in `#endif // def UNIFFI_SHARED_H`) you *must* ⚠️
// ⚠️ increment the version suffix in all instances of UNIFFI_SHARED_HEADER_V4 in this file. ⚠️

typedef struct RustBuffer
{
    int32_t capacity;
    int32_t len;
    uint8_t *_Nullable data;
} RustBuffer;

typedef int32_t (*ForeignCallback)(uint64_t, int32_t, const uint8_t *_Nonnull, int32_t, RustBuffer *_Nonnull);

// Task defined in Rust that Swift executes
typedef void (*UniFfiRustTaskCallback)(const void * _Nullable, int8_t);

// Callback to execute Rust tasks using a Swift Task
//
// Args:
//   executor: ForeignExecutor lowered into a size_t value
//   delay: Delay in MS
//   task: UniFfiRustTaskCallback to call
//   task_data: data to pass the task callback
typedef int8_t (*UniFfiForeignExecutorCallback)(size_t, uint32_t, UniFfiRustTaskCallback _Nullable, const void * _Nullable);

typedef struct ForeignBytes
{
    int32_t len;
    const uint8_t *_Nullable data;
} ForeignBytes;

// Error definitions
typedef struct RustCallStatus {
    int8_t code;
    RustBuffer errorBuf;
} RustCallStatus;

// ⚠️ Attention: If you change this #else block (ending in `#endif // def UNIFFI_SHARED_H`) you *must* ⚠️
// ⚠️ increment the version suffix in all instances of UNIFFI_SHARED_HEADER_V4 in this file. ⚠️
#endif // def UNIFFI_SHARED_H

// Continuation callback for UniFFI Futures
typedef void (*UniFfiRustFutureContinuation)(void * _Nonnull, int8_t);

// Scaffolding functions
void* _Nonnull uniffi_mobile_fn_func_auth(RustBuffer url, RustBuffer secret_key);
RustBuffer ffi_mobile_rustbuffer_alloc(int32_t size, RustCallStatus *_Nonnull out_status);
RustBuffer ffi_mobile_rustbuffer_from_bytes(ForeignBytes bytes, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rustbuffer_free(RustBuffer buf, RustCallStatus *_Nonnull out_status);
RustBuffer ffi_mobile_rustbuffer_reserve(RustBuffer buf, int32_t additional, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_continuation_callback_set(UniFfiRustFutureContinuation _Nonnull callback);
void ffi_mobile_rust_future_poll_u8(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_u8(void* _Nonnull handle);
void ffi_mobile_rust_future_free_u8(void* _Nonnull handle);
uint8_t ffi_mobile_rust_future_complete_u8(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_i8(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_i8(void* _Nonnull handle);
void ffi_mobile_rust_future_free_i8(void* _Nonnull handle);
int8_t ffi_mobile_rust_future_complete_i8(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_u16(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_u16(void* _Nonnull handle);
void ffi_mobile_rust_future_free_u16(void* _Nonnull handle);
uint16_t ffi_mobile_rust_future_complete_u16(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_i16(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_i16(void* _Nonnull handle);
void ffi_mobile_rust_future_free_i16(void* _Nonnull handle);
int16_t ffi_mobile_rust_future_complete_i16(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_u32(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_u32(void* _Nonnull handle);
void ffi_mobile_rust_future_free_u32(void* _Nonnull handle);
uint32_t ffi_mobile_rust_future_complete_u32(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_i32(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_i32(void* _Nonnull handle);
void ffi_mobile_rust_future_free_i32(void* _Nonnull handle);
int32_t ffi_mobile_rust_future_complete_i32(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_u64(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_u64(void* _Nonnull handle);
void ffi_mobile_rust_future_free_u64(void* _Nonnull handle);
uint64_t ffi_mobile_rust_future_complete_u64(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_i64(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_i64(void* _Nonnull handle);
void ffi_mobile_rust_future_free_i64(void* _Nonnull handle);
int64_t ffi_mobile_rust_future_complete_i64(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_f32(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_f32(void* _Nonnull handle);
void ffi_mobile_rust_future_free_f32(void* _Nonnull handle);
float ffi_mobile_rust_future_complete_f32(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_f64(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_f64(void* _Nonnull handle);
void ffi_mobile_rust_future_free_f64(void* _Nonnull handle);
double ffi_mobile_rust_future_complete_f64(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_pointer(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_pointer(void* _Nonnull handle);
void ffi_mobile_rust_future_free_pointer(void* _Nonnull handle);
void*_Nonnull ffi_mobile_rust_future_complete_pointer(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_rust_buffer(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_rust_buffer(void* _Nonnull handle);
void ffi_mobile_rust_future_free_rust_buffer(void* _Nonnull handle);
RustBuffer ffi_mobile_rust_future_complete_rust_buffer(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
void ffi_mobile_rust_future_poll_void(void* _Nonnull handle, void* _Nonnull uniffi_callback);
void ffi_mobile_rust_future_cancel_void(void* _Nonnull handle);
void ffi_mobile_rust_future_free_void(void* _Nonnull handle);
void ffi_mobile_rust_future_complete_void(void* _Nonnull handle, RustCallStatus *_Nonnull out_status);
uint16_t uniffi_mobile_checksum_func_auth(void);
uint32_t ffi_mobile_uniffi_contract_version(void);
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
package.json (17 lines changed)
@@ -46,11 +46,15 @@
     "example:ios": "cd example && cd ios && rm -rf Pods && cd ../ && npm i && bundle install && npm run build:ios && npm run ios",
     "example:android": "cd example && npm i && npm run build:android && npm run android",
     "reinstall": "yarn install && npm run clean && npm run prepare",
-    "cargo-build": "cd rust && cargo build && cd pubky && cargo build && cd pubky && cargo build && cd ../ && cd pubky-common && cargo build && cd ../ && cd pubky-homeserver && cargo build && cd ../../../",
+    "cargo-build": "yarn install && node setup-rust.js && cd rust && cargo build && cd pubky && cargo build && cd pubky && cargo build && cd ../ && cd pubky-common && cargo build && cd ../ && cd pubky-homeserver && cargo build && cd ../../../",
-    "update-bindings:ios": "npm run cargo-build && node setup-ios-bindings.js && npm run reinstall",
+    "update-local-bindings:ios": "npm run cargo-build && cd rust && ./build.sh ios && cd ../ && node setup-local-ios-bindings.js && npm run reinstall",
-    "update-bindings:android": "npm run cargo-build && node setup-android-bindings.js && npm run reinstall",
+    "update-local-bindings:android": "npm run cargo-build && cd rust && ./build.sh android && cd ../ && node setup-local-android-bindings.js && npm run reinstall",
-    "update-bindings": "npm run reinstall && npm run cargo-build && npm run update-bindings:ios && npm run update-bindings:android",
+    "update-local-bindings": "npm run reinstall && npm run cargo-build && npm run update-local-bindings:ios && npm run update-local-bindings:android",
-    "rebuild": "rm -rf node_modules && cd example && rm -rf node_modules && cd ios && rm -rf Pods Podfile.lock build && cd ../../ && npm run cargo-build && yarn install && npm run update-bindings && cd example && yarn install && bundle install && cd ios && pod install && cd ../ && yarn build:ios && yarn ios"
+    "rebuild-local": "rm -rf node_modules && cd example && rm -rf node_modules && cd ios && rm -rf Pods Podfile.lock build && cd ../../ && npm run cargo-build && yarn install && npm run update-local-bindings && cd example && yarn install && bundle install && cd ios && pod install && cd ../ && yarn build:ios && yarn ios",
+    "update-remote-bindings:ios": "node setup-remote-ios-bindings.js && npm run reinstall",
+    "update-remote-bindings:android": "node setup-remote-android-bindings.js && npm run reinstall",
+    "update-remote-bindings": "npm run reinstall && npm run update-remote-bindings:ios && npm run update-remote-bindings:android",
+    "rebuild-remote": "rm -rf node_modules && cd example && rm -rf node_modules && cd ios && rm -rf Pods Podfile.lock build && cd ../../ && yarn install && npm run update-remote-bindings && cd example && yarn install && bundle install && cd ios && pod install && cd ../ && yarn build:ios && yarn ios"
   },
   "keywords": [
     "pubky",
@@ -69,7 +73,7 @@
   },
   "homepage": "https://github.com/pubky/react-native-pubky#readme",
   "publishConfig": {
-    "access": "restricted",
+    "access": "public",
     "registry": "https://registry.npmjs.org/"
   },
   "dependencies": {
@@ -93,6 +97,7 @@
     "react-native": "0.75.3",
     "react-native-builder-bob": "^0.30.2",
     "release-it": "^15.0.0",
+    "simple-git": "^3.27.0",
     "turbo": "^1.10.7",
     "typescript": "^5.2.2"
   },

rust/Cargo.lock (generated, 3140 lines changed)
File diff suppressed because it is too large
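The new `update-remote-bindings:*` scripts call `setup-remote-ios-bindings.js` and `setup-remote-android-bindings.js`, and `simple-git` is added as a dev dependency to support them. The contents of those scripts are not part of this diff, so the following is only a hedged sketch of the kind of thing such a script might do with `simple-git`: clone the SDK repo named in the README and copy prebuilt binding files into the package. The checkout directory, destination paths, and file layout below are assumptions, not taken from the repo.

```ts
// setup-remote-ios-bindings.js — illustrative sketch only; the real script is not shown in this diff.
// Assumes the bindings live in the pubky-core-mobile-sdk repo (per the README) and that
// copying them into an ios/ folder is all that is needed. All paths below are made up.
import { simpleGit } from 'simple-git';
import fs from 'node:fs';
import path from 'node:path';

const SDK_REPO = 'https://github.com/pubky/pubky-core-mobile-sdk'; // from the README
const CHECKOUT_DIR = path.join(process.cwd(), '.sdk-checkout');     // assumed location

async function main(): Promise<void> {
  // Fetch a fresh, shallow copy of the SDK repository containing the prebuilt bindings.
  fs.rmSync(CHECKOUT_DIR, { recursive: true, force: true });
  await simpleGit().clone(SDK_REPO, CHECKOUT_DIR, ['--depth', '1']);

  // Copy the iOS binding artifacts into this package (directory names are hypothetical).
  fs.cpSync(path.join(CHECKOUT_DIR, 'bindings', 'ios'), path.join(process.cwd(), 'ios'), {
    recursive: true,
  });
  console.log('Remote iOS bindings copied.');
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```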
@@ -1,31 +0,0 @@
[package]
name = "react_native_pubky"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
crate_type = ["cdylib", "staticlib"]
name = "pubkymobile"

[[bin]]
name = "testing"
path = "testing/main.rs"

[net]
git-fetch-with-cli = true

[dependencies]
uniffi = { version = "0.25.3", features = [ "cli" ] }
serde_json = "1.0.114"
hex = "0.4.3"
sha2 = "0.10.8"
serde = { version = "^1.0.209", features = ["derive"] }
tokio = "1.40.0"
url = "2.5.2"
pkarr = { git = "https://github.com/Pubky/pkarr", branch = "v3", features = ["async", "rand"] }
pubky = { version = "0.1.0", path = "pubky/pubky" }
pubky-common = { version = "0.1.0", path = "pubky/pubky-common" }
pubky_homeserver = { version = "0.1.0", path = "pubky/pubky-homeserver" }
base64 = "0.22.1"
once_cell = "1.19.0"
@@ -1,6 +0,0 @@
// This file was autogenerated by some hot garbage in the `uniffi` crate.
// Trust me, you don't want to mess with it!
module pubkymobileFFI {
    header "pubkymobileFFI.h"
    export *
}
rust/pubky/Cargo.lock (generated, 2811 lines changed)
File diff suppressed because it is too large
@@ -1,18 +0,0 @@
[workspace]
members = [
    "pubky",
    "pubky-*",

    "examples/authz/authenticator"
]

# See: https://github.com/rust-lang/rust/issues/90148#issuecomment-949194352
resolver = "2"

[workspace.dependencies]
pkarr = { git = "https://github.com/Pubky/pkarr", branch = "v3", package = "pkarr", features = ["async"] }
serde = { version = "^1.0.209", features = ["derive"] }

[profile.release]
lto = true
opt-level = 'z'
@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2023

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@@ -1,8 +0,0 @@
# Pubky

> The Web, long centralized, must decentralize; Long decentralized, must centralize.

> [!WARNING]
> Pubky is still under heavy development and should be considered an alpha software.
>
> Features might be added, removed, or changed. Data might be lost.
@@ -1,24 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
@@ -1,34 +0,0 @@
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/pubky.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Pubky Auth Demo</title>
    <link rel="stylesheet" href="./src/index.css" />
    <script type="module">
      import "@synonymdev/pubky"
    </script>
    <script type="module" src="/src/pubky-auth-widget.js"></script>
  </head>
  <body>
    <pubky-auth-widget
      id="widget"
      relay="https://demo.httprelay.io/link/"
    >
    </pubky-auth-widget>

    <main>
      <h1>Third Party app!</h1>
      <p>this is a demo for using Pubky Auth in an unhosted (no backend) app.</p>
      <form>
        <label style="display:block">
          <input type="checkbox" onChange="document.getElementById('widget').switchTestnet()">testnet (use local test network)</input>
        </label>
        <label style="display:block">
          <input type="checkbox" onChange="let w = document.getElementById('widget'); w.caps.length > 0 ? w.setCapabilities(null) : w.setCapabilities('/pub/pubky.app/:rw,/pub/example.com/nested:rw')">Authz (Authorization, set if your pubky has an account on a Homeserver)</input>
        </label>
      </form>
    </main>
  </body>
</html>
rust/pubky/examples/authz/3rd-party-app/package-lock.json (generated, 1136 lines changed)
File diff suppressed because it is too large
@@ -1,20 +0,0 @@
{
  "name": "pubky-auth-3rd-party",
  "private": true,
  "version": "0.0.0",
  "type": "module",
  "scripts": {
    "start": "npm run dev",
    "dev": "vite --host --open",
    "build": "vite build",
    "preview": "vite preview"
  },
  "dependencies": {
    "@synonymdev/pubky": "^0.1.16",
    "lit": "^3.2.0",
    "qrcode": "^1.5.4"
  },
  "devDependencies": {
    "vite": "^5.4.2"
  }
}
@@ -1,9 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.2" viewBox="0 0 452 690">
  <style>
    path { fill: black; }
    @media (prefers-color-scheme: dark) {
      path { fill: white; }
    }
  </style>
  <path fill-rule="evenodd" class="a" d="m0.1 84.7l80.5 17.1 15.8-74.5 73.8 44.2 54.7-71.5 55.2 71.5 70.3-44.2 19.4 74.5 81.6-17.1-74.5 121.5c-40.5-35.3-93.5-56.6-151.4-56.6-57.8 0-110.7 21.3-151.2 56.4zm398.4 293.8c0 40.6-14 78-37.4 107.4l67 203.8h-403.1l66.2-202.3c-24.1-29.7-38.6-67.6-38.6-108.9 0-95.5 77.4-172.8 173-172.8 95.5 0 172.9 77.3 172.9 172.8zm-212.9 82.4l-48.2 147.3h178.1l-48.6-148 2.9-1.6c28.2-15.6 47.3-45.6 47.3-80.1 0-50.5-41-91.4-91.5-91.4-50.6 0-91.6 40.9-91.6 91.4 0 35 19.7 65.4 48.6 80.8z"/>
</svg>
@@ -1,48 +0,0 @@
|
|||||||
:root {
|
|
||||||
font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI',
|
|
||||||
Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
|
|
||||||
color: white;
|
|
||||||
|
|
||||||
background: radial-gradient(
|
|
||||||
circle,
|
|
||||||
transparent 20%,
|
|
||||||
#151718 20%,
|
|
||||||
#151718 80%,
|
|
||||||
transparent 80%,
|
|
||||||
transparent
|
|
||||||
),
|
|
||||||
radial-gradient(
|
|
||||||
circle,
|
|
||||||
transparent 20%,
|
|
||||||
#151718 20%,
|
|
||||||
#151718 80%,
|
|
||||||
transparent 80%,
|
|
||||||
transparent
|
|
||||||
)
|
|
||||||
25px 25px,
|
|
||||||
linear-gradient(#202020 1px, transparent 2px) 0 -1px,
|
|
||||||
linear-gradient(90deg, #202020 1px, #151718 2px) -1px 0;
|
|
||||||
background-size: 50px 50px, 50px 50px, 25px 25px, 25px 25px;
|
|
||||||
}
|
|
||||||
|
|
||||||
body {
|
|
||||||
margin: 0;
|
|
||||||
display: flex;
|
|
||||||
place-items: center;
|
|
||||||
min-width: 20rem;
|
|
||||||
min-height: 100vh;
|
|
||||||
font-family: var(--font-family);
|
|
||||||
}
|
|
||||||
|
|
||||||
h1 {
|
|
||||||
font-weight: bold;
|
|
||||||
font-size: 3.2rem;
|
|
||||||
line-height: 1.1;
|
|
||||||
}
|
|
||||||
|
|
||||||
main {
|
|
||||||
max-width: 80rem;
|
|
||||||
margin: 0 auto;
|
|
||||||
padding: 2rem;
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
@@ -1,391 +0,0 @@
|
|||||||
import { LitElement, css, html } from 'lit'
|
|
||||||
import { createRef, ref } from 'lit/directives/ref.js';
|
|
||||||
import QRCode from 'qrcode'
|
|
||||||
|
|
||||||
const DEFAULT_HTTP_RELAY = "https://demo.httprelay.io/link"
|
|
||||||
|
|
||||||
/**
|
|
||||||
*/
|
|
||||||
export class PubkyAuthWidget extends LitElement {
|
|
||||||
|
|
||||||
static get properties() {
|
|
||||||
return {
|
|
||||||
// === Config ===
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Relay endpoint for the widget to receive Pubky AuthTokens
|
|
||||||
*
|
|
||||||
* Internally, a random channel ID will be generated and a
|
|
||||||
* GET request made for `${realy url}/${channelID}`
|
|
||||||
*
|
|
||||||
* If no relay is passed, the widget will use a default relay:
|
|
||||||
* https://demo.httprelay.io/link
|
|
||||||
*/
|
|
||||||
relay: { type: String },
|
|
||||||
/**
|
|
||||||
* Capabilities requested or this application encoded as a string.
|
|
||||||
*/
|
|
||||||
caps: { type: String },
|
|
||||||
|
|
||||||
// === State ===
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Widget's state (open or closed)
|
|
||||||
*/
|
|
||||||
open: { type: Boolean },
|
|
||||||
/**
|
|
||||||
* Show "copied to clipboard" note
|
|
||||||
*/
|
|
||||||
showCopied: { type: Boolean },
|
|
||||||
|
|
||||||
// === Internal ===
|
|
||||||
testnet: { type: Boolean },
|
|
||||||
pubky: { type: Object }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
canvasRef = createRef();
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
if (!window.pubky) {
|
|
||||||
throw new Error("window.pubky is unavailable, make sure to import `@synonymdev/pubky` before this web component.")
|
|
||||||
}
|
|
||||||
|
|
||||||
super()
|
|
||||||
|
|
||||||
this.testnet = false;
|
|
||||||
this.open = false;
|
|
||||||
|
|
||||||
/** @type {import("@synonymdev/pubky").PubkyClient} */
|
|
||||||
this.pubkyClient = new window.pubky.PubkyClient();
|
|
||||||
|
|
||||||
this.caps = this.caps || ""
|
|
||||||
}
|
|
||||||
|
|
||||||
connectedCallback() {
|
|
||||||
super.connectedCallback()
|
|
||||||
|
|
||||||
this._generateURL()
|
|
||||||
}
|
|
||||||
|
|
||||||
switchTestnet() {
|
|
||||||
this.testnet = !this.testnet;
|
|
||||||
|
|
||||||
console.debug("Switching testnet");
|
|
||||||
|
|
||||||
if (this.testnet) {
|
|
||||||
this.pubkyClient = window.pubky.PubkyClient.testnet()
|
|
||||||
} else {
|
|
||||||
this.pubkyClient = new window.pubky.PubkyClient();
|
|
||||||
}
|
|
||||||
|
|
||||||
console.debug("Pkarr Relays: " + this.pubkyClient.getPkarrRelays())
|
|
||||||
|
|
||||||
this._generateURL()
|
|
||||||
}
|
|
||||||
|
|
||||||
setCapabilities(caps) {
|
|
||||||
this.caps = caps || ""
|
|
||||||
|
|
||||||
this._generateURL(this.caps);
|
|
||||||
console.debug("Updated capabilities");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
_generateURL() {
|
|
||||||
let [url, promise] = this.pubkyClient.authRequest(this.relay || DEFAULT_HTTP_RELAY, this.caps);
|
|
||||||
|
|
||||||
promise.then(pubky => {
|
|
||||||
this.pubky = pubky.z32();
|
|
||||||
}).catch(e => {
|
|
||||||
console.error(e)
|
|
||||||
})
|
|
||||||
|
|
||||||
this.authUrl = url
|
|
||||||
|
|
||||||
this._updateQr();
|
|
||||||
}
|
|
||||||
|
|
||||||
_updateQr() {
|
|
||||||
if (this.canvas) {
|
|
||||||
this._setQr(this.canvas);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
_setQr(canvas) {
|
|
||||||
this.canvas = canvas
|
|
||||||
QRCode.toCanvas(canvas, this.authUrl, {
|
|
||||||
margin: 2,
|
|
||||||
scale: 8,
|
|
||||||
|
|
||||||
color: {
|
|
||||||
light: '#fff',
|
|
||||||
dark: '#000',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
_switchOpen() {
|
|
||||||
this.open = !this.open
|
|
||||||
setTimeout(() => { this.pubky = null }, 80)
|
|
||||||
}
|
|
||||||
|
|
||||||
async _copyToClipboard() {
|
|
||||||
try {
|
|
||||||
await navigator.clipboard.writeText(this.authUrl);
|
|
||||||
this.showCopied = true;
|
|
||||||
setTimeout(() => { this.showCopied = false }, 1000)
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to copy text: ', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
render() {
|
|
||||||
return html`
|
|
||||||
<div
|
|
||||||
id="widget"
|
|
||||||
class=${this.open ? "open" : ""}
|
|
||||||
>
|
|
||||||
<button class="header" @click=${this._switchOpen}>
|
|
||||||
<div class="header-content">
|
|
||||||
<svg id="pubky-icon" xmlns="http://www.w3.org/2000/svg" version="1.2" viewBox="0 0 452 690">
|
|
||||||
<style>
|
|
||||||
path { fill: black; }
|
|
||||||
@media (prefers-color-scheme: dark) {
|
|
||||||
path { fill: white; }
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<path fill-rule="evenodd" class="a" d="m0.1 84.7l80.5 17.1 15.8-74.5 73.8 44.2 54.7-71.5 55.2 71.5 70.3-44.2 19.4 74.5 81.6-17.1-74.5 121.5c-40.5-35.3-93.5-56.6-151.4-56.6-57.8 0-110.7 21.3-151.2 56.4zm398.4 293.8c0 40.6-14 78-37.4 107.4l67 203.8h-403.1l66.2-202.3c-24.1-29.7-38.6-67.6-38.6-108.9 0-95.5 77.4-172.8 173-172.8 95.5 0 172.9 77.3 172.9 172.8zm-212.9 82.4l-48.2 147.3h178.1l-48.6-148 2.9-1.6c28.2-15.6 47.3-45.6 47.3-80.1 0-50.5-41-91.4-91.5-91.4-50.6 0-91.6 40.9-91.6 91.4 0 35 19.7 65.4 48.6 80.8z"/>
|
|
||||||
</svg>
|
|
||||||
<span class="text">
|
|
||||||
Pubky Auth
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
</button>
|
|
||||||
<div class="line"></div>
|
|
||||||
<div id="widget-content">
|
|
||||||
${this.pubky
|
|
||||||
? this.caps.length > 0
|
|
||||||
? html`
|
|
||||||
<p>Successfully authorized: </p>
|
|
||||||
<p>${this.pubky}</p>
|
|
||||||
<p>With capabilities</p>
|
|
||||||
${this.caps.split(",").map(cap => html`
|
|
||||||
<p>${cap}</p>
|
|
||||||
`)
|
|
||||||
}
|
|
||||||
`
|
|
||||||
: html`
|
|
||||||
<p>Successfully authenticated to: </p>
|
|
||||||
<p>${this.pubky}</p>
|
|
||||||
`
|
|
||||||
: html`
|
|
||||||
<p>Scan or copy Pubky auth URL</p>
|
|
||||||
<div class="card">
|
|
||||||
<canvas id="qr" ${ref(this._setQr)}></canvas>
|
|
||||||
</div>
|
|
||||||
<button class="card url" @click=${this._copyToClipboard}>
|
|
||||||
<div class="copied ${this.showCopied ? "show" : ""}">Copied to Clipboard</div>
|
|
||||||
<p>${this.authUrl}</p>
|
|
||||||
<svg width="14" height="16" viewBox="0 0 14 16" fill="none" xmlns="http://www.w3.org/2000/svg"><rect width="10" height="12" rx="2" fill="white"></rect><rect x="3" y="3" width="10" height="12" rx="2" fill="white" stroke="#3B3B3B"></rect></svg>
|
|
||||||
</button>
|
|
||||||
`
|
|
||||||
}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
`
|
|
||||||
}
|
|
||||||
|
|
||||||
_renderWidgetContentBase() {
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static get styles() {
|
|
||||||
return css`
|
|
||||||
* {
|
|
||||||
box-sizing: border-box;
|
|
||||||
}
|
|
||||||
|
|
||||||
:host {
|
|
||||||
--full-width: 22rem;
|
|
||||||
--full-height: 31rem;
|
|
||||||
--header-height: 3rem;
|
|
||||||
--closed-width: 3rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
a {
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
button {
|
|
||||||
padding: 0;
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
color: inherit;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
p {
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** End reset */
|
|
||||||
|
|
||||||
#widget {
|
|
||||||
color: white;
|
|
||||||
|
|
||||||
position: fixed;
|
|
||||||
top: 1rem;
|
|
||||||
right: 1rem;
|
|
||||||
|
|
||||||
background-color:red;
|
|
||||||
|
|
||||||
z-index: 99999;
|
|
||||||
overflow: hidden;
|
|
||||||
background: rgba(43, 43, 43, .7372549019607844);
|
|
||||||
border: 1px solid #3c3c3c;
|
|
||||||
box-shadow: 0 10px 34px -10px rgba(236, 243, 222, .05);
|
|
||||||
border-radius: 8px;
|
|
||||||
-webkit-backdrop-filter: blur(8px);
|
|
||||||
backdrop-filter: blur(8px);
|
|
||||||
|
|
||||||
width: var(--closed-width);
|
|
||||||
height: var(--header-height);
|
|
||||||
|
|
||||||
will-change: height,width;
|
|
||||||
transition-property: height, width;
|
|
||||||
transition-duration: 80ms;
|
|
||||||
transition-timing-function: ease-in;
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget.open{
|
|
||||||
width: var(--full-width);
|
|
||||||
height: var(--full-height);
|
|
||||||
}
|
|
||||||
|
|
||||||
.header {
|
|
||||||
width: 100%;
|
|
||||||
height: var(--header-height);
|
|
||||||
display: flex;
|
|
||||||
justify-content: center;
|
|
||||||
align-items:center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-content {
|
|
||||||
display: flex;
|
|
||||||
justify-content: center;
|
|
||||||
align-items: baseline;
|
|
||||||
column-gap: .5rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget
|
|
||||||
.header .text {
|
|
||||||
display: none;
|
|
||||||
font-weight: bold;
|
|
||||||
font-size: 1.5rem;
|
|
||||||
}
|
|
||||||
#widget.open
|
|
||||||
.header .text {
|
|
||||||
display: block
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget.open
|
|
||||||
.header {
|
|
||||||
width: var(--full-width);
|
|
||||||
justify-content: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
#pubky-icon {
|
|
||||||
height: 1.5rem;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget.open
|
|
||||||
#pubky-icon {
|
|
||||||
width: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget-content{
|
|
||||||
width: var(--full-width);
|
|
||||||
padding: 0 1rem
|
|
||||||
}
|
|
||||||
|
|
||||||
#widget p {
|
|
||||||
font-size: .87rem;
|
|
||||||
line-height: 1rem;
|
|
||||||
text-align: center;
|
|
||||||
color: #fff;
|
|
||||||
opacity: .5;
|
|
||||||
|
|
||||||
/* Fix flash wrap in open animation */
|
|
||||||
text-wrap: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
#qr {
|
|
||||||
width: 18em !important;
|
|
||||||
height: 18em !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.card {
|
|
||||||
position: relative;
|
|
||||||
background: #3b3b3b;
|
|
||||||
border-radius: 5px;
|
|
||||||
padding: 1rem;
|
|
||||||
margin-top: 1rem;
|
|
||||||
display: flex;
|
|
||||||
justify-content: center;
|
|
||||||
align-items: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.card.url {
|
|
||||||
padding: .625rem;
|
|
||||||
justify-content: space-between;
|
|
||||||
max-width:100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.url p {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
|
|
||||||
line-height: 1!important;
|
|
||||||
width: 93%;
|
|
||||||
overflow: hidden;
|
|
||||||
text-overflow: ellipsis;
|
|
||||||
text-wrap: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.line {
|
|
||||||
height: 1px;
|
|
||||||
background-color: #3b3b3b;
|
|
||||||
flex: 1 1;
|
|
||||||
margin-bottom: 1rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.copied {
|
|
||||||
will-change: opacity;
|
|
||||||
transition-property: opacity;
|
|
||||||
transition-duration: 80ms;
|
|
||||||
transition-timing-function: ease-in;
|
|
||||||
|
|
||||||
opacity: 0;
|
|
||||||
|
|
||||||
position: absolute;
|
|
||||||
right: 0;
|
|
||||||
top: -1.6rem;
|
|
||||||
font-size: 0.9em;
|
|
||||||
background: rgb(43 43 43 / 98%);
|
|
||||||
padding: .5rem;
|
|
||||||
border-radius: .3rem;
|
|
||||||
color: #ddd;
|
|
||||||
}
|
|
||||||
|
|
||||||
.copied.show {
|
|
||||||
opacity:1
|
|
||||||
}
|
|
||||||
`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
window.customElements.define('pubky-auth-widget', PubkyAuthWidget)
|
|
||||||
@@ -1,29 +0,0 @@
# Pubky Auth Example

This example shows 3rd party authorization in Pubky.

It consists of 2 parts:

1. [3rd party app](./3rd-party-app): A web component showing the how to implement a Pubky Auth widget.
2. [Authenticator CLI](./authenticator): A CLI showing the authenticator (key chain) asking user for consent and generating the AuthToken.

## Usage

First you need to be running a local testnet Homeserver, in the root of this repo run

```bash
cargo run --bin pubky_homeserver -- --testnet
```

Run the frontend of the 3rd party app

```bash
cd ./3rd-party-app
npm start
```

Copy the Pubky Auth URL from the frontend.

Finally run the CLI to paste the Pubky Auth in.

You should see the frontend reacting by showing the success of authorization and session details.
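The widget deleted elsewhere in this commit (`pubky-auth-widget.js`) drives the 3rd-party side of this flow with `@synonymdev/pubky`'s `PubkyClient.authRequest(relay, caps)`, which returns the auth URL to display plus a promise that resolves with the authorizing public key. The following TypeScript sketch restates only the calls visible in that deleted widget; it assumes `PubkyClient` is importable as a named export, and the helper names are mine, not the repo's.

```ts
// Sketch of the 3rd-party-app side of Pubky Auth, mirroring the deleted pubky-auth-widget.js.
// Assumes @synonymdev/pubky exposes PubkyClient with authRequest(relay, caps) returning
// [url, promise], as used in the widget; error handling is kept minimal.
import { PubkyClient } from '@synonymdev/pubky';

const DEFAULT_HTTP_RELAY = 'https://demo.httprelay.io/link'; // same default as the widget
const CAPS = '/pub/pubky.app/:rw,/pub/example.com/nested:rw'; // example caps from index.html

async function requestAuthorization(): Promise<void> {
  // Use the testnet client when talking to a local test homeserver, as the widget does.
  const client = PubkyClient.testnet();

  // authRequest returns the URL to show (e.g. as a QR code) plus a promise that resolves
  // once the authenticator (e.g. the CLI in ./authenticator) sends back an AuthToken.
  const [authUrl, response] = client.authRequest(DEFAULT_HTTP_RELAY, CAPS);
  console.log('Scan or paste this Pubky Auth URL:', authUrl.toString());

  const pubky = await response;              // resolves with the authorizing public key
  console.log('Authorized by:', pubky.z32()); // z-base-32 encoding, as in the widget
}

requestAuthorization().catch(console.error);
```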
@@ -1,14 +0,0 @@
[package]
name = "authenticator"
version = "0.1.0"
edition = "2021"

[dependencies]
anyhow = "1.0.86"
base64 = "0.22.1"
clap = { version = "4.5.16", features = ["derive"] }
pubky = { version = "0.1.0", path = "../../../pubky" }
pubky-common = { version = "0.1.0", path = "../../../pubky-common" }
rpassword = "7.3.1"
tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] }
url = "2.5.2"
@@ -1,80 +0,0 @@
use anyhow::Result;
use clap::Parser;
use pubky::PubkyClient;
use std::path::PathBuf;
use url::Url;

use pubky_common::{capabilities::Capability, crypto::PublicKey};

/// local testnet HOMESERVER
const HOMESERVER: &str = "8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo";

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Cli {
    /// Path to a recovery_file of the Pubky you want to sign in with
    recovery_file: PathBuf,

    /// Pubky Auth url
    url: Url,
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    let recovery_file = std::fs::read(&cli.recovery_file)?;
    println!("\nSuccessfully opened recovery file");

    let url = cli.url;

    let caps = url
        .query_pairs()
        .filter_map(|(key, value)| {
            if key == "caps" {
                return Some(
                    value
                        .split(',')
                        .filter_map(|cap| Capability::try_from(cap).ok())
                        .collect::<Vec<_>>(),
                );
            };
            None
        })
        .next()
        .unwrap_or_default();

    if !caps.is_empty() {
        println!("\nRequired Capabilities:");
    }

    for cap in &caps {
        println!("    {} : {:?}", cap.scope, cap.actions);
    }

    // === Consent form ===

    println!("\nEnter your recovery_file's passphrase to confirm:");
    let passphrase = rpassword::read_password()?;

    let keypair = pubky_common::recovery_file::decrypt_recovery_file(&recovery_file, &passphrase)?;

    println!("Successfully decrypted recovery file...");
    println!("PublicKey: {}", keypair.public_key());

    let client = PubkyClient::testnet();

    // For the purposes of this demo, we need to make sure
    // the user has an account on the local homeserver.
    if client.signin(&keypair).await.is_err() {
        client
            .signup(&keypair, &PublicKey::try_from(HOMESERVER).unwrap())
            .await?;
    };

    println!("Sending AuthToken to the 3rd party app...");

    client.send_auth_token(&keypair, url).await?;

    Ok(())
}
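For reference, the capability extraction the CLI performs on the Pubky Auth URL (pulling the `caps` query parameter apart into scope and actions) can be restated in a few lines of TypeScript. This is an illustrative re-statement of the logic above, not code from the repo; the `Capability` shape and the placeholder URL are simplified assumptions.

```ts
// Illustrative TypeScript equivalent of the caps-parsing logic in the deleted main.rs.
// A capability string looks like "/pub/pubky.app/:rw" (scope, then ":" and action letters),
// per the example capabilities in the 3rd-party-app's index.html.
interface Capability {
  scope: string;
  actions: string; // e.g. "rw" — simplified compared to the Rust Action enum
}

function parseCaps(authUrl: string): Capability[] {
  const caps = new URL(authUrl).searchParams.get('caps') ?? '';
  return caps
    .split(',')
    .filter((entry) => entry.includes(':'))
    .map((entry) => {
      const idx = entry.lastIndexOf(':');
      return { scope: entry.slice(0, idx), actions: entry.slice(idx + 1) };
    });
}

// Example with a placeholder URL: logs [{ scope: "/pub/pubky.app/", actions: "rw" }]
console.log(parseCaps('https://example.com/?caps=/pub/pubky.app/:rw'));
```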
@@ -1,33 +0,0 @@
[package]
name = "pubky-common"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
base32 = "0.5.0"
blake3 = "1.5.1"
ed25519-dalek = "2.1.1"
once_cell = "1.19.0"
pkarr = { workspace = true }
rand = "0.8.5"
thiserror = "1.0.60"
postcard = { version = "1.0.8", features = ["alloc"] }
crypto_secretbox = { version = "0.1.1", features = ["std"] }
argon2 = { version = "0.5.3", features = ["std"] }

serde = { workspace = true, optional = true }

[target.'cfg(target_arch = "wasm32")'.dependencies]
js-sys = "0.3.69"

[dev-dependencies]
postcard = "1.0.8"

[features]

serde = ["dep:serde", "ed25519-dalek/serde", "pkarr/serde"]
full = ['serde']

default = ['full']
@@ -1,279 +0,0 @@
|
|||||||
//! Client-server Authentication using signed timesteps
|
|
||||||
|
|
||||||
use std::sync::{Arc, Mutex};
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
capabilities::{Capabilities, Capability},
|
|
||||||
crypto::{Keypair, PublicKey, Signature},
|
|
||||||
namespaces::PUBKY_AUTH,
|
|
||||||
timestamp::Timestamp,
|
|
||||||
};
|
|
||||||
|
|
||||||
// 30 seconds
|
|
||||||
const TIME_INTERVAL: u64 = 30 * 1_000_000;
|
|
||||||
|
|
||||||
const CURRENT_VERSION: u8 = 0;
|
|
||||||
// 45 seconds in the past or the future
|
|
||||||
const TIMESTAMP_WINDOW: i64 = 45 * 1_000_000;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
|
||||||
pub struct AuthToken {
|
|
||||||
/// Signature over the token.
|
|
||||||
signature: Signature,
|
|
||||||
/// A namespace to ensure this signature can't be used for any
|
|
||||||
/// other purposes that share the same message structurea by accident.
|
|
||||||
namespace: [u8; 10],
|
|
||||||
/// Version of the [AuthToken], in case we need to upgrade it to support unforseen usecases.
|
|
||||||
///
|
|
||||||
/// Version 0:
|
|
||||||
/// - Signer is implicitly the same as the root keypair for
|
|
||||||
/// the [AuthToken::pubky], without any delegation.
|
|
||||||
/// - Capabilities are only meant for resoucres on the homeserver.
|
|
||||||
version: u8,
|
|
||||||
/// Timestamp
|
|
||||||
timestamp: Timestamp,
|
|
||||||
/// The [PublicKey] of the owner of the resources being accessed by this token.
|
|
||||||
pubky: PublicKey,
|
|
||||||
// Variable length capabilities
|
|
||||||
capabilities: Capabilities,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AuthToken {
|
|
||||||
pub fn sign(keypair: &Keypair, capabilities: impl Into<Capabilities>) -> Self {
|
|
||||||
let timestamp = Timestamp::now();
|
|
||||||
|
|
||||||
let mut token = Self {
|
|
||||||
signature: Signature::from_bytes(&[0; 64]),
|
|
||||||
namespace: *PUBKY_AUTH,
|
|
||||||
version: 0,
|
|
||||||
timestamp,
|
|
||||||
pubky: keypair.public_key(),
|
|
||||||
capabilities: capabilities.into(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let serialized = token.serialize();
|
|
||||||
|
|
||||||
token.signature = keypair.sign(&serialized[65..]);
|
|
||||||
|
|
||||||
token
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn capabilities(&self) -> &[Capability] {
|
|
||||||
&self.capabilities.0
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn verify(bytes: &[u8]) -> Result<Self, Error> {
|
|
||||||
if bytes[75] > CURRENT_VERSION {
|
|
||||||
return Err(Error::UnknownVersion);
|
|
||||||
}
|
|
||||||
|
|
||||||
let token = AuthToken::deserialize(bytes)?;
|
|
||||||
|
|
||||||
match token.version {
|
|
||||||
0 => {
|
|
||||||
let now = Timestamp::now();
|
|
||||||
|
|
||||||
// Chcek timestamp;
|
|
||||||
let diff = token.timestamp.difference(&now);
|
|
||||||
if diff > TIMESTAMP_WINDOW {
|
|
||||||
return Err(Error::TooFarInTheFuture);
|
|
||||||
}
|
|
||||||
if diff < -TIMESTAMP_WINDOW {
|
|
||||||
return Err(Error::Expired);
|
|
||||||
}
|
|
||||||
|
|
||||||
token
|
|
||||||
.pubky
|
|
||||||
.verify(AuthToken::signable(token.version, bytes), &token.signature)
|
|
||||||
.map_err(|_| Error::InvalidSignature)?;
|
|
||||||
|
|
||||||
Ok(token)
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
postcard::to_allocvec(self).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
|
|
||||||
Ok(postcard::from_bytes(bytes)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pubky(&self) -> &PublicKey {
|
|
||||||
&self.pubky
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A unique ID for this [AuthToken], which is a concatenation of
|
|
||||||
/// [AuthToken::pubky] and [AuthToken::timestamp].
|
|
||||||
///
|
|
||||||
/// Assuming that [AuthToken::timestamp] is unique for every [AuthToken::pubky].
|
|
||||||
fn id(version: u8, bytes: &[u8]) -> Box<[u8]> {
|
|
||||||
match version {
|
|
||||||
0 => bytes[75..115].into(),
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signable(version: u8, bytes: &[u8]) -> &[u8] {
|
|
||||||
match version {
|
|
||||||
0 => bytes[65..].into(),
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Default)]
|
|
||||||
/// Keeps track of used AuthToken until they expire.
|
|
||||||
pub struct AuthVerifier {
|
|
||||||
seen: Arc<Mutex<Vec<Box<[u8]>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AuthVerifier {
|
|
||||||
pub fn verify(&self, bytes: &[u8]) -> Result<AuthToken, Error> {
|
|
||||||
self.gc();
|
|
||||||
|
|
||||||
let token = AuthToken::verify(bytes)?;
|
|
||||||
|
|
||||||
let mut seen = self.seen.lock().unwrap();
|
|
||||||
|
|
||||||
let id = AuthToken::id(token.version, bytes);
|
|
||||||
|
|
||||||
match seen.binary_search_by(|element| element.cmp(&id)) {
|
|
||||||
Ok(_) => Err(Error::AlreadyUsed),
|
|
||||||
Err(index) => {
|
|
||||||
seen.insert(index, id);
|
|
||||||
Ok(token)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Private Methods ===
|
|
||||||
|
|
||||||
/// Remove all tokens older than two time intervals in the past.
|
|
||||||
fn gc(&self) {
|
|
||||||
let threshold = ((Timestamp::now().into_inner() / TIME_INTERVAL) - 2).to_be_bytes();
|
|
||||||
|
|
||||||
let mut inner = self.seen.lock().unwrap();
|
|
||||||
|
|
||||||
match inner.binary_search_by(|element| element[0..8].cmp(&threshold)) {
|
|
||||||
Ok(index) | Err(index) => {
|
|
||||||
inner.drain(0..index);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error("Unknown version")]
|
|
||||||
UnknownVersion,
|
|
||||||
#[error("AuthToken has a timestamp that is more than 45 seconds in the future")]
|
|
||||||
TooFarInTheFuture,
|
|
||||||
#[error("AuthToken has a timestamp that is more than 45 seconds in the past")]
|
|
||||||
Expired,
|
|
||||||
#[error("Invalid Signature")]
|
|
||||||
InvalidSignature,
|
|
||||||
#[error(transparent)]
|
|
||||||
Postcard(#[from] postcard::Error),
|
|
||||||
#[error("AuthToken already used")]
|
|
||||||
AlreadyUsed,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use crate::{
|
|
||||||
auth::TIMESTAMP_WINDOW, capabilities::Capability, crypto::Keypair, timestamp::Timestamp,
|
|
||||||
};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn v0_id_signable() {
|
|
||||||
let signer = Keypair::random();
|
|
||||||
let capabilities = vec![Capability::root()];
|
|
||||||
|
|
||||||
let token = AuthToken::sign(&signer, capabilities.clone());
|
|
||||||
|
|
||||||
let serialized = &token.serialize();
|
|
||||||
|
|
||||||
let mut id = vec![];
|
|
||||||
id.extend_from_slice(&token.timestamp.to_bytes());
|
|
||||||
id.extend_from_slice(signer.public_key().as_bytes());
|
|
||||||
|
|
||||||
assert_eq!(AuthToken::id(token.version, serialized), id.into());
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
AuthToken::signable(token.version, serialized),
|
|
||||||
&serialized[65..]
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn sign_verify() {
|
|
||||||
let signer = Keypair::random();
|
|
||||||
let capabilities = vec![Capability::root()];
|
|
||||||
|
|
||||||
let verifier = AuthVerifier::default();
|
|
||||||
|
|
||||||
let token = AuthToken::sign(&signer, capabilities.clone());
|
|
||||||
|
|
||||||
let serialized = &token.serialize();
|
|
||||||
|
|
||||||
verifier.verify(serialized).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(token.capabilities, capabilities.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn expired() {
|
|
||||||
let signer = Keypair::random();
|
|
||||||
let capabilities = Capabilities(vec![Capability::root()]);
|
|
||||||
|
|
||||||
let verifier = AuthVerifier::default();
|
|
||||||
|
|
||||||
let timestamp = (&Timestamp::now()) - (TIMESTAMP_WINDOW as u64);
|
|
||||||
|
|
||||||
let mut signable = vec![];
|
|
||||||
signable.extend_from_slice(signer.public_key().as_bytes());
|
|
||||||
signable.extend_from_slice(&postcard::to_allocvec(&capabilities).unwrap());
|
|
||||||
|
|
||||||
let signature = signer.sign(&signable);
|
|
||||||
|
|
||||||
let token = AuthToken {
|
|
||||||
signature,
|
|
||||||
namespace: *PUBKY_AUTH,
|
|
||||||
version: 0,
|
|
||||||
timestamp,
|
|
||||||
pubky: signer.public_key(),
|
|
||||||
capabilities,
|
|
||||||
};
|
|
||||||
|
|
||||||
let serialized = token.serialize();
|
|
||||||
|
|
||||||
let result = verifier.verify(&serialized);
|
|
||||||
|
|
||||||
assert_eq!(result, Err(Error::Expired));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn already_used() {
|
|
||||||
let signer = Keypair::random();
|
|
||||||
let capabilities = vec![Capability::root()];
|
|
||||||
|
|
||||||
let verifier = AuthVerifier::default();
|
|
||||||
|
|
||||||
let token = AuthToken::sign(&signer, capabilities.clone());
|
|
||||||
|
|
||||||
let serialized = &token.serialize();
|
|
||||||
|
|
||||||
verifier.verify(serialized).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(token.capabilities, capabilities.into());
|
|
||||||
|
|
||||||
assert_eq!(verifier.verify(serialized), Err(Error::AlreadyUsed));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,237 +0,0 @@
|
|||||||
use std::fmt::Display;
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub struct Capability {
|
|
||||||
pub scope: String,
|
|
||||||
pub actions: Vec<Action>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Capability {
|
|
||||||
/// Create a root [Capability] at the `/` path with all the available [Action]s
|
|
||||||
pub fn root() -> Self {
|
|
||||||
Capability {
|
|
||||||
scope: "/".to_string(),
|
|
||||||
actions: vec![Action::Read, Action::Write],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub enum Action {
|
|
||||||
/// Can read the scope at the specified path (GET requests).
|
|
||||||
Read,
|
|
||||||
/// Can write to the scope at the specified path (PUT/POST/DELETE requests).
|
|
||||||
Write,
|
|
||||||
/// Unknown ability
|
|
||||||
Unknown(char),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Action> for char {
|
|
||||||
fn from(value: &Action) -> Self {
|
|
||||||
match value {
|
|
||||||
Action::Read => 'r',
|
|
||||||
Action::Write => 'w',
|
|
||||||
Action::Unknown(char) => char.to_owned(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<char> for Action {
|
|
||||||
type Error = Error;
|
|
||||||
|
|
||||||
fn try_from(value: char) -> Result<Self, Error> {
|
|
||||||
match value {
|
|
||||||
'r' => Ok(Self::Read),
|
|
||||||
'w' => Ok(Self::Write),
|
|
||||||
_ => Err(Error::InvalidAction),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for Capability {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}:{}",
|
|
||||||
self.scope,
|
|
||||||
self.actions.iter().map(char::from).collect::<String>()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<String> for Capability {
|
|
||||||
type Error = Error;
|
|
||||||
|
|
||||||
fn try_from(value: String) -> Result<Self, Error> {
|
|
||||||
value.as_str().try_into()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for Capability {
|
|
||||||
type Error = Error;
|
|
||||||
|
|
||||||
fn try_from(value: &str) -> Result<Self, Error> {
|
|
||||||
if value.matches(':').count() != 1 {
|
|
||||||
return Err(Error::InvalidFormat);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !value.starts_with('/') {
|
|
||||||
return Err(Error::InvalidScope);
|
|
||||||
}
|
|
||||||
|
|
||||||
let actions_str = value.rsplit(':').next().unwrap_or("");
|
|
||||||
|
|
||||||
let mut actions = Vec::new();
|
|
||||||
|
|
||||||
for char in actions_str.chars() {
|
|
||||||
let ability = Action::try_from(char)?;
|
|
||||||
|
|
||||||
match actions.binary_search_by(|element| char::from(element).cmp(&char)) {
|
|
||||||
Ok(_) => {}
|
|
||||||
Err(index) => {
|
|
||||||
actions.insert(index, ability);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let scope = value[0..value.len() - actions_str.len() - 1].to_string();
|
|
||||||
|
|
||||||
Ok(Capability { scope, actions })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for Capability {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: serde::Serializer,
|
|
||||||
{
|
|
||||||
let string = self.to_string();
|
|
||||||
|
|
||||||
string.serialize(serializer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> Deserialize<'de> for Capability {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let string: String = Deserialize::deserialize(deserializer)?;
|
|
||||||
|
|
||||||
string.try_into().map_err(serde::de::Error::custom)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error("Capability: Invalid scope: does not start with `/`")]
|
|
||||||
InvalidScope,
|
|
||||||
#[error("Capability: Invalid format should be <scope>:<abilities>")]
|
|
||||||
InvalidFormat,
|
|
||||||
#[error("Capability: Invalid Action")]
|
|
||||||
InvalidAction,
|
|
||||||
#[error("Capabilities: Invalid capabilities format")]
|
|
||||||
InvalidCapabilities,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Default, Debug, PartialEq, Eq)]
|
|
||||||
/// A wrapper around `Vec<Capability>` to enable serialization without
|
|
||||||
/// a varint. Useful when [Capabilities] are at the end of a struct.
|
|
||||||
pub struct Capabilities(pub Vec<Capability>);
|
|
||||||
|
|
||||||
impl Capabilities {
|
|
||||||
pub fn contains(&self, capability: &Capability) -> bool {
|
|
||||||
self.0.contains(capability)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Vec<Capability>> for Capabilities {
|
|
||||||
fn from(value: Vec<Capability>) -> Self {
|
|
||||||
Self(value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Capabilities> for Vec<Capability> {
|
|
||||||
fn from(value: Capabilities) -> Self {
|
|
||||||
value.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for Capabilities {
|
|
||||||
type Error = Error;
|
|
||||||
|
|
||||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
|
||||||
let mut caps = vec![];
|
|
||||||
|
|
||||||
for s in value.split(',') {
|
|
||||||
if let Ok(cap) = Capability::try_from(s) {
|
|
||||||
caps.push(cap);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Capabilities(caps))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for Capabilities {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let string = self
|
|
||||||
.0
|
|
||||||
.iter()
|
|
||||||
.map(|c| c.to_string())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(",");
|
|
||||||
|
|
||||||
write!(f, "{}", string)
|
|
||||||
}
|
|
||||||
}
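// A small sketch of how the string form round-trips through the `TryFrom<&str>`
// and `Display` impls above (hypothetical scopes, shown for illustration only):
//
//     let caps = Capabilities::try_from("/pub/pubky.app/:rw,/pub/foo/:r").unwrap();
//     assert_eq!(caps.0.len(), 2);
//     assert_eq!(caps.to_string(), "/pub/pubky.app/:rw,/pub/foo/:r");
//
// Note that invalid entries are silently skipped by `TryFrom<&str>` rather than
// failing the whole set.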
|
|
||||||
|
|
||||||
impl Serialize for Capabilities {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: serde::Serializer,
|
|
||||||
{
|
|
||||||
self.to_string().serialize(serializer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> Deserialize<'de> for Capabilities {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let string: String = Deserialize::deserialize(deserializer)?;
|
|
||||||
|
|
||||||
let mut caps = vec![];
|
|
||||||
|
|
||||||
for s in string.split(',') {
|
|
||||||
if let Ok(cap) = Capability::try_from(s) {
|
|
||||||
caps.push(cap);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Capabilities(caps))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn pubky_caps() {
|
|
||||||
let cap = Capability {
|
|
||||||
scope: "/pub/pubky.app/".to_string(),
|
|
||||||
actions: vec![Action::Read, Action::Write],
|
|
||||||
};
|
|
||||||
|
|
||||||
// Read and write within the directory `/pub/pubky.app/`.
|
|
||||||
let expected_string = "/pub/pubky.app/:rw";
|
|
||||||
|
|
||||||
assert_eq!(cap.to_string(), expected_string);
|
|
||||||
|
|
||||||
assert_eq!(Capability::try_from(expected_string), Ok(cap))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
use crypto_secretbox::{
|
|
||||||
aead::{Aead, AeadCore, KeyInit, OsRng},
|
|
||||||
XSalsa20Poly1305,
|
|
||||||
};
|
|
||||||
use rand::prelude::Rng;
|
|
||||||
|
|
||||||
pub use pkarr::{Keypair, PublicKey};
|
|
||||||
|
|
||||||
pub use ed25519_dalek::Signature;
|
|
||||||
|
|
||||||
pub type Hash = blake3::Hash;
|
|
||||||
|
|
||||||
pub use blake3::hash;
|
|
||||||
|
|
||||||
pub use blake3::Hasher;
|
|
||||||
|
|
||||||
pub fn random_hash() -> Hash {
|
|
||||||
let mut rng = rand::thread_rng();
|
|
||||||
Hash::from_bytes(rng.gen())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn random_bytes<const N: usize>() -> [u8; N] {
|
|
||||||
let mut rng = rand::thread_rng();
|
|
||||||
let mut arr = [0u8; N];
|
|
||||||
|
|
||||||
#[allow(clippy::needless_range_loop)]
|
|
||||||
for i in 0..N {
|
|
||||||
arr[i] = rng.gen();
|
|
||||||
}
|
|
||||||
arr
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn encrypt(plain_text: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, Error> {
|
|
||||||
let cipher = XSalsa20Poly1305::new(encryption_key.into());
|
|
||||||
let nonce = XSalsa20Poly1305::generate_nonce(&mut OsRng); // unique per message
|
|
||||||
let ciphertext = cipher.encrypt(&nonce, plain_text)?;
|
|
||||||
|
|
||||||
let mut out: Vec<u8> = Vec::with_capacity(nonce.len() + ciphertext.len());
|
|
||||||
out.extend_from_slice(nonce.as_slice());
|
|
||||||
out.extend_from_slice(&ciphertext);
|
|
||||||
|
|
||||||
Ok(out)
|
|
||||||
}
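// Layout of the encrypted output (and what `decrypt` below expects): a 24-byte
// XSalsa20 nonce followed by the ciphertext, which carries the Poly1305 tag.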
|
|
||||||
|
|
||||||
pub fn decrypt(bytes: &[u8], encryption_key: &[u8; 32]) -> Result<Vec<u8>, Error> {
|
|
||||||
let cipher = XSalsa20Poly1305::new(encryption_key.into());
|
|
||||||
|
|
||||||
Ok(cipher.decrypt(bytes[..24].into(), &bytes[24..])?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error(transparent)]
|
|
||||||
SecretBox(#[from] crypto_secretbox::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn encrypt_decrypt() {
|
|
||||||
let plain_text = "Plain text!";
|
|
||||||
let encryption_key = [0; 32];
|
|
||||||
|
|
||||||
let encrypted = encrypt(plain_text.as_bytes(), &encryption_key).unwrap();
|
|
||||||
let decrypted = decrypt(&encrypted, &encryption_key).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(decrypted, plain_text.as_bytes())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
pub mod auth;
|
|
||||||
pub mod capabilities;
|
|
||||||
pub mod crypto;
|
|
||||||
pub mod namespaces;
|
|
||||||
pub mod recovery_file;
|
|
||||||
pub mod session;
|
|
||||||
pub mod timestamp;
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
pub const PUBKY_AUTH: &[u8; 10] = b"PUBKY:AUTH";
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
use argon2::Argon2;
|
|
||||||
use pkarr::Keypair;
|
|
||||||
|
|
||||||
use crate::crypto::{decrypt, encrypt};
|
|
||||||
|
|
||||||
static SPEC_NAME: &str = "recovery";
|
|
||||||
static SPEC_LINE: &str = "pubky.org/recovery";
|
|
||||||
|
|
||||||
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair, Error> {
|
|
||||||
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase)?;
|
|
||||||
|
|
||||||
let newline_index = recovery_file
|
|
||||||
.iter()
|
|
||||||
.position(|&r| r == 10)
|
|
||||||
.ok_or(())
|
|
||||||
.map_err(|_| Error::RecoveryFileMissingSpecLine)?;
|
|
||||||
|
|
||||||
let spec_line = &recovery_file[..newline_index];
|
|
||||||
|
|
||||||
if !(spec_line.starts_with(SPEC_LINE.as_bytes())
|
|
||||||
|| spec_line.starts_with(b"pkarr.org/recovery"))
|
|
||||||
{
|
|
||||||
return Err(Error::RecoveryFileVersionNotSupported);
|
|
||||||
}
|
|
||||||
|
|
||||||
let encrypted = &recovery_file[newline_index + 1..];
|
|
||||||
|
|
||||||
if encrypted.is_empty() {
|
|
||||||
return Err(Error::RecoverFileMissingEncryptedSecretKey);
|
|
||||||
};
|
|
||||||
|
|
||||||
let decrypted = decrypt(encrypted, &encryption_key)?;
|
|
||||||
let length = decrypted.len();
|
|
||||||
let secret_key: [u8; 32] = decrypted
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| Error::RecoverFileInvalidSecretKeyLength(length))?;
|
|
||||||
|
|
||||||
Ok(Keypair::from_secret_key(&secret_key))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u8>, Error> {
|
|
||||||
let encryption_key = recovery_file_encryption_key_from_passphrase(passphrase)?;
|
|
||||||
let secret_key = keypair.secret_key();
|
|
||||||
|
|
||||||
let encrypted_secret_key = encrypt(&secret_key, &encryption_key)?;
|
|
||||||
|
|
||||||
let mut out = Vec::with_capacity(SPEC_LINE.len() + 1 + encrypted_secret_key.len());
|
|
||||||
|
|
||||||
out.extend_from_slice(SPEC_LINE.as_bytes());
|
|
||||||
out.extend_from_slice(b"\n");
|
|
||||||
out.extend_from_slice(&encrypted_secret_key);
|
|
||||||
|
|
||||||
Ok(out)
|
|
||||||
}
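// The resulting file is: the spec line "pubky.org/recovery", a single `\n`, then
// the encrypted secret key (itself a 24-byte nonce followed by the ciphertext).
// The encryption key is derived from the passphrase below with Argon2 (default
// parameters), using the spec name "recovery" as the salt.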
|
|
||||||
|
|
||||||
fn recovery_file_encryption_key_from_passphrase(passphrase: &str) -> Result<[u8; 32], Error> {
|
|
||||||
let argon2id = Argon2::default();
|
|
||||||
|
|
||||||
let mut out = [0; 32];
|
|
||||||
|
|
||||||
argon2id.hash_password_into(passphrase.as_bytes(), SPEC_NAME.as_bytes(), &mut out)?;
|
|
||||||
|
|
||||||
Ok(out)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
// === Recovery file ==
|
|
||||||
#[error("Recovery file should start with a spec line, followed by a new line character")]
|
|
||||||
RecoveryFileMissingSpecLine,
|
|
||||||
|
|
||||||
#[error("Recovery file should start with a spec line, followed by a new line character")]
|
|
||||||
RecoveryFileVersionNotSupported,
|
|
||||||
|
|
||||||
#[error("Recovery file should contain an encrypted secret key after the new line character")]
|
|
||||||
RecoverFileMissingEncryptedSecretKey,
|
|
||||||
|
|
||||||
#[error("Recovery file encrypted secret key should be 32 bytes, got {0}")]
|
|
||||||
RecoverFileInvalidSecretKeyLength(usize),
|
|
||||||
|
|
||||||
#[error(transparent)]
|
|
||||||
Argon(#[from] argon2::Error),
|
|
||||||
|
|
||||||
#[error(transparent)]
|
|
||||||
Crypto(#[from] crate::crypto::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn encrypt_decrypt_recovery_file() {
|
|
||||||
let passphrase = "very secure password";
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
let recovery_file = create_recovery_file(&keypair, passphrase).unwrap();
|
|
||||||
let recovered = decrypt_recovery_file(&recovery_file, passphrase).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(recovered.public_key(), keypair.public_key());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,139 +0,0 @@
|
|||||||
use pkarr::PublicKey;
|
|
||||||
use postcard::{from_bytes, to_allocvec};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
extern crate alloc;
|
|
||||||
use alloc::vec::Vec;
|
|
||||||
|
|
||||||
use crate::{auth::AuthToken, capabilities::Capability, timestamp::Timestamp};
|
|
||||||
|
|
||||||
// TODO: add IP address?
|
|
||||||
// TODO: use https://crates.io/crates/user-agent-parser to parse the session
|
|
||||||
// and get more information from the user-agent.
|
|
||||||
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
|
||||||
pub struct Session {
|
|
||||||
version: usize,
|
|
||||||
pubky: PublicKey,
|
|
||||||
created_at: u64,
|
|
||||||
/// User specified name, defaults to the user-agent.
|
|
||||||
name: String,
|
|
||||||
user_agent: String,
|
|
||||||
capabilities: Vec<Capability>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Session {
|
|
||||||
pub fn new(token: &AuthToken, user_agent: Option<String>) -> Self {
|
|
||||||
Self {
|
|
||||||
version: 0,
|
|
||||||
pubky: token.pubky().to_owned(),
|
|
||||||
created_at: Timestamp::now().into_inner(),
|
|
||||||
capabilities: token.capabilities().to_vec(),
|
|
||||||
user_agent: user_agent.as_deref().unwrap_or("").to_string(),
|
|
||||||
name: user_agent.as_deref().unwrap_or("").to_string(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Getters ===
|
|
||||||
|
|
||||||
pub fn pubky(&self) -> &PublicKey {
|
|
||||||
&self.pubky
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn capabilities(&self) -> &Vec<Capability> {
|
|
||||||
&self.capabilities
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Setters ===
|
|
||||||
|
|
||||||
pub fn set_user_agent(&mut self, user_agent: String) -> &mut Self {
|
|
||||||
self.user_agent = user_agent;
|
|
||||||
|
|
||||||
if self.name.is_empty() {
|
|
||||||
self.name.clone_from(&self.user_agent)
|
|
||||||
}
|
|
||||||
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_capabilities(&mut self, capabilities: Vec<Capability>) -> &mut Self {
|
|
||||||
self.capabilities = capabilities;
|
|
||||||
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Public Methods ===
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
to_allocvec(self).expect("Session::serialize")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize(bytes: &[u8]) -> Result<Self> {
|
|
||||||
if bytes.is_empty() {
|
|
||||||
return Err(Error::EmptyPayload);
|
|
||||||
}
|
|
||||||
|
|
||||||
if bytes[0] > 0 {
|
|
||||||
return Err(Error::UnknownVersion);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(from_bytes(bytes)?)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: add `can_read()`, `can_write()` and `is_root()` methods
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type Result<T> = core::result::Result<T, Error>;
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug, PartialEq)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error("Empty payload")]
|
|
||||||
EmptyPayload,
|
|
||||||
#[error("Unknown version")]
|
|
||||||
UnknownVersion,
|
|
||||||
#[error(transparent)]
|
|
||||||
Postcard(#[from] postcard::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use crate::crypto::Keypair;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn serialize() {
|
|
||||||
let keypair = Keypair::from_secret_key(&[0; 32]);
|
|
||||||
let pubky = keypair.public_key();
|
|
||||||
|
|
||||||
let session = Session {
|
|
||||||
user_agent: "foo".to_string(),
|
|
||||||
capabilities: vec![Capability::root()],
|
|
||||||
created_at: 0,
|
|
||||||
pubky,
|
|
||||||
version: 0,
|
|
||||||
name: "".to_string(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let serialized = session.serialize();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
serialized,
|
|
||||||
[
|
|
||||||
0, 59, 106, 39, 188, 206, 182, 164, 45, 98, 163, 168, 208, 42, 111, 13, 115, 101,
|
|
||||||
50, 21, 119, 29, 226, 67, 166, 58, 192, 72, 161, 139, 89, 218, 41, 0, 0, 3, 102,
|
|
||||||
111, 111, 1, 4, 47, 58, 114, 119
|
|
||||||
]
|
|
||||||
);
|
|
||||||
|
|
||||||
let deserialized = Session::deserialize(&serialized).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(deserialized, session)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn deserialize() {
|
|
||||||
let result = Session::deserialize(&[]);
|
|
||||||
|
|
||||||
assert_eq!(result, Err(Error::EmptyPayload));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,280 +0,0 @@
|
|||||||
//! Strictly monotonic unix timestamp in microseconds
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::fmt::Display;
|
|
||||||
use std::{
|
|
||||||
ops::{Add, Sub},
|
|
||||||
sync::Mutex,
|
|
||||||
};
|
|
||||||
|
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use rand::Rng;
|
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
|
||||||
use std::time::SystemTime;
|
|
||||||
|
|
||||||
/// ~4% chance that none of 10 clocks have a matching id.
|
|
||||||
const CLOCK_MASK: u64 = (1 << 8) - 1;
|
|
||||||
const TIME_MASK: u64 = !0 >> 8;
|
|
||||||
|
|
||||||
pub struct TimestampFactory {
|
|
||||||
clock_id: u64,
|
|
||||||
last_time: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TimestampFactory {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
clock_id: rand::thread_rng().gen::<u64>() & CLOCK_MASK,
|
|
||||||
last_time: system_time() & TIME_MASK,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn now(&mut self) -> Timestamp {
|
|
||||||
// Ensure strict monotonicity.
|
|
||||||
self.last_time = (system_time() & TIME_MASK).max(self.last_time + CLOCK_MASK + 1);
|
|
||||||
|
|
||||||
// Add clock_id to the end of the timestamp
|
|
||||||
Timestamp(self.last_time | self.clock_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for TimestampFactory {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static DEFAULT_FACTORY: Lazy<Mutex<TimestampFactory>> =
|
|
||||||
Lazy::new(|| Mutex::new(TimestampFactory::default()));
|
|
||||||
|
|
||||||
/// Strictly monotonic timestamp since [SystemTime::UNIX_EPOCH] in microseconds as u64.
|
|
||||||
///
|
|
||||||
/// The purpose of this timestamp is to be unique per "user", not globally;
|
|
||||||
/// it achieves this by:
|
|
||||||
/// 1. Override the last byte with a random `clock_id`, reducing the probability
|
|
||||||
/// of two matching timestamps across multiple machines/threads.
|
|
||||||
/// 2. Guarantee that the remaining 7 bytes are ever increasing (strictly monotonic) within
|
|
||||||
/// the same thread regardless of the wall clock value
|
|
||||||
///
|
|
||||||
/// This timestamp is also serialized as BE bytes to remain sortable.
|
|
||||||
/// If a `utf-8` encoding is necessary, it is encoded as [base32::Alphabet::Crockford]
|
|
||||||
/// to act as a sortable Id.
|
|
||||||
///
|
|
||||||
/// U64 of microseconds is valid for the next 500 thousand years!
|
|
||||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Hash, Eq, Ord)]
|
|
||||||
pub struct Timestamp(u64);
|
|
||||||
|
|
||||||
impl Timestamp {
|
|
||||||
pub fn now() -> Self {
|
|
||||||
DEFAULT_FACTORY.lock().unwrap().now()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return big endian bytes
|
|
||||||
pub fn to_bytes(&self) -> [u8; 8] {
|
|
||||||
self.0.to_be_bytes()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn difference(&self, rhs: &Timestamp) -> i64 {
|
|
||||||
(self.0 as i64) - (rhs.0 as i64)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn into_inner(&self) -> u64 {
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Timestamp {
|
|
||||||
fn default() -> Self {
|
|
||||||
Timestamp::now()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for Timestamp {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
let bytes: [u8; 8] = self.into();
|
|
||||||
f.write_str(&base32::encode(base32::Alphabet::Crockford, &bytes))
|
|
||||||
}
|
|
||||||
}
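// The 8 big-endian bytes encode to a fixed 13-character Crockford base32 string
// (ceil(64 / 5) = 13). Because the alphabet is in ascending ASCII order, the
// string form sorts the same way as the raw bytes, so it can double as a
// lexicographic cursor (see the events listing in the homeserver).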
|
|
||||||
|
|
||||||
impl TryFrom<String> for Timestamp {
|
|
||||||
type Error = TimestampError;
|
|
||||||
|
|
||||||
fn try_from(value: String) -> Result<Self, Self::Error> {
|
|
||||||
match base32::decode(base32::Alphabet::Crockford, &value) {
|
|
||||||
Some(vec) => {
|
|
||||||
let bytes: [u8; 8] = vec
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| TimestampError::InvalidEncoding)?;
|
|
||||||
|
|
||||||
Ok(bytes.into())
|
|
||||||
}
|
|
||||||
None => Err(TimestampError::InvalidEncoding),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&[u8]> for Timestamp {
|
|
||||||
type Error = TimestampError;
|
|
||||||
|
|
||||||
fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
|
|
||||||
let bytes: [u8; 8] = bytes
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| TimestampError::InvalidBytesLength(bytes.len()))?;
|
|
||||||
|
|
||||||
Ok(bytes.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Timestamp> for [u8; 8] {
|
|
||||||
fn from(timestamp: &Timestamp) -> Self {
|
|
||||||
timestamp.0.to_be_bytes()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<[u8; 8]> for Timestamp {
|
|
||||||
fn from(bytes: [u8; 8]) -> Self {
|
|
||||||
Self(u64::from_be_bytes(bytes))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// === U64 conversion ===
|
|
||||||
|
|
||||||
impl From<Timestamp> for u64 {
|
|
||||||
fn from(value: Timestamp) -> Self {
|
|
||||||
value.into_inner()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Add<u64> for &Timestamp {
|
|
||||||
type Output = Timestamp;
|
|
||||||
|
|
||||||
fn add(self, rhs: u64) -> Self::Output {
|
|
||||||
Timestamp(self.0 + rhs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Sub<u64> for &Timestamp {
|
|
||||||
type Output = Timestamp;
|
|
||||||
|
|
||||||
fn sub(self, rhs: u64) -> Self::Output {
|
|
||||||
Timestamp(self.0 - rhs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for Timestamp {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: serde::Serializer,
|
|
||||||
{
|
|
||||||
let bytes = self.to_bytes();
|
|
||||||
bytes.serialize(serializer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> Deserialize<'de> for Timestamp {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let bytes: [u8; 8] = Deserialize::deserialize(deserializer)?;
|
|
||||||
Ok(Timestamp(u64::from_be_bytes(bytes)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
|
||||||
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
|
|
||||||
fn system_time() -> u64 {
|
|
||||||
SystemTime::now()
|
|
||||||
.duration_since(SystemTime::UNIX_EPOCH)
|
|
||||||
.expect("time drift")
|
|
||||||
.as_micros() as u64
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_arch = "wasm32")]
|
|
||||||
/// Return the number of microseconds since [SystemTime::UNIX_EPOCH]
|
|
||||||
pub fn system_time() -> u64 {
|
|
||||||
// Won't be an issue for more than 5000 years!
|
|
||||||
(js_sys::Date::now() as u64 )
|
|
||||||
// Convert milliseconds to microseconds
|
|
||||||
* 1000
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
|
||||||
pub enum TimestampError {
|
|
||||||
#[error("Invalid bytes length, Timestamp should be encoded as 8 bytes, got {0}")]
|
|
||||||
InvalidBytesLength(usize),
|
|
||||||
#[error("Invalid timestamp encoding")]
|
|
||||||
InvalidEncoding,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use std::collections::HashSet;
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn strictly_monotonic() {
|
|
||||||
const COUNT: usize = 100;
|
|
||||||
|
|
||||||
let mut set = HashSet::with_capacity(COUNT);
|
|
||||||
let mut vec = Vec::with_capacity(COUNT);
|
|
||||||
|
|
||||||
for _ in 0..COUNT {
|
|
||||||
let timestamp = Timestamp::now();
|
|
||||||
|
|
||||||
set.insert(timestamp.clone());
|
|
||||||
vec.push(timestamp);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut ordered = vec.clone();
|
|
||||||
ordered.sort();
|
|
||||||
|
|
||||||
assert_eq!(set.len(), COUNT, "unique");
|
|
||||||
assert_eq!(ordered, vec, "ordered");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn strings() {
|
|
||||||
const COUNT: usize = 100;
|
|
||||||
|
|
||||||
let mut set = HashSet::with_capacity(COUNT);
|
|
||||||
let mut vec = Vec::with_capacity(COUNT);
|
|
||||||
|
|
||||||
for _ in 0..COUNT {
|
|
||||||
let string = Timestamp::now().to_string();
|
|
||||||
|
|
||||||
set.insert(string.clone());
|
|
||||||
vec.push(string)
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut ordered = vec.clone();
|
|
||||||
ordered.sort();
|
|
||||||
|
|
||||||
assert_eq!(set.len(), COUNT, "unique");
|
|
||||||
assert_eq!(ordered, vec, "ordered");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn to_from_string() {
|
|
||||||
let timestamp = Timestamp::now();
|
|
||||||
let string = timestamp.to_string();
|
|
||||||
let decoded: Timestamp = string.try_into().unwrap();
|
|
||||||
|
|
||||||
assert_eq!(decoded, timestamp)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn serde() {
|
|
||||||
let timestamp = Timestamp::now();
|
|
||||||
|
|
||||||
let serialized = postcard::to_allocvec(×tamp).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(serialized, timestamp.to_bytes());
|
|
||||||
|
|
||||||
let deserialized: Timestamp = postcard::from_bytes(&serialized).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(deserialized, timestamp);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "pubky_homeserver"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
anyhow = "1.0.82"
|
|
||||||
axum = { version = "0.7.5", features = ["macros"] }
|
|
||||||
axum-extra = { version = "0.9.3", features = ["typed-header", "async-read-body"] }
|
|
||||||
base32 = "0.5.1"
|
|
||||||
bytes = "^1.7.1"
|
|
||||||
clap = { version = "4.5.11", features = ["derive"] }
|
|
||||||
dirs-next = "2.0.0"
|
|
||||||
flume = "0.11.0"
|
|
||||||
futures-util = "0.3.30"
|
|
||||||
heed = "0.20.3"
|
|
||||||
hex = "0.4.3"
|
|
||||||
pkarr = { workspace = true }
|
|
||||||
postcard = { version = "1.0.8", features = ["alloc"] }
|
|
||||||
pubky-common = { version = "0.1.0", path = "../pubky-common" }
|
|
||||||
serde = { workspace = true }
|
|
||||||
tokio = { version = "1.37.0", features = ["full"] }
|
|
||||||
toml = "0.8.19"
|
|
||||||
tower-cookies = "0.10.0"
|
|
||||||
tower-http = { version = "0.5.2", features = ["cors", "trace"] }
|
|
||||||
tracing = "0.1.40"
|
|
||||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
# Pubky Homeserver
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
Use `cargo run`
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo run -- --config=./src/config.toml
|
|
||||||
```
|
|
||||||
|
|
||||||
Or build first, then run the binary from the target directory.
|
|
||||||
|
|
||||||
Build
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo build --release
|
|
||||||
```
|
|
||||||
|
|
||||||
Run with an optional config file
|
|
||||||
|
|
||||||
```bash
|
|
||||||
../target/release/pubky-homeserver --config=./src/config.toml
|
|
||||||
```
|
|
||||||
@@ -1,248 +0,0 @@
|
|||||||
//! Configuration for the server
|
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
|
||||||
use pkarr::Keypair;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::{
|
|
||||||
fmt::Debug,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
use tracing::info;
|
|
||||||
|
|
||||||
use pubky_common::timestamp::Timestamp;
|
|
||||||
|
|
||||||
const DEFAULT_HOMESERVER_PORT: u16 = 6287;
|
|
||||||
const DEFAULT_STORAGE_DIR: &str = "pubky";
|
|
||||||
|
|
||||||
pub const DEFAULT_LIST_LIMIT: u16 = 100;
|
|
||||||
pub const DEFAULT_MAX_LIST_LIMIT: u16 = 1000;
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Clone, PartialEq)]
|
|
||||||
struct ConfigToml {
|
|
||||||
testnet: Option<bool>,
|
|
||||||
port: Option<u16>,
|
|
||||||
bootstrap: Option<Vec<String>>,
|
|
||||||
domain: Option<String>,
|
|
||||||
storage: Option<PathBuf>,
|
|
||||||
secret_key: Option<String>,
|
|
||||||
dht_request_timeout: Option<Duration>,
|
|
||||||
default_list_limit: Option<u16>,
|
|
||||||
max_list_limit: Option<u16>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Server configuration
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Config {
|
|
||||||
/// Whether or not this server is running in a testnet.
|
|
||||||
testnet: bool,
|
|
||||||
/// The configured port for this server.
|
|
||||||
port: Option<u16>,
|
|
||||||
/// Bootstrapping DHT nodes.
|
|
||||||
///
|
|
||||||
/// Helpful to run the server locally or in testnet.
|
|
||||||
bootstrap: Option<Vec<String>>,
|
|
||||||
/// A public domain for this server
|
|
||||||
/// necessary for web browsers running in an https environment.
|
|
||||||
domain: Option<String>,
|
|
||||||
/// Path to the storage directory.
|
|
||||||
///
|
|
||||||
/// Defaults to a directory in the OS data directory
|
|
||||||
storage: PathBuf,
|
|
||||||
/// Server keypair.
|
|
||||||
///
|
|
||||||
/// Defaults to a random keypair.
|
|
||||||
keypair: Keypair,
|
|
||||||
dht_request_timeout: Option<Duration>,
|
|
||||||
/// The default limit of a list api if no `limit` query parameter is provided.
|
|
||||||
///
|
|
||||||
/// Defaults to `100`
|
|
||||||
default_list_limit: u16,
|
|
||||||
/// The maximum limit of a list api, even if a `limit` query parameter is provided.
|
|
||||||
///
|
|
||||||
/// Defaults to `1000`
|
|
||||||
max_list_limit: u16,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Config {
|
|
||||||
fn try_from_str(value: &str) -> Result<Self> {
|
|
||||||
let config_toml: ConfigToml = toml::from_str(value)?;
|
|
||||||
|
|
||||||
let keypair = if let Some(secret_key) = config_toml.secret_key {
|
|
||||||
let secret_key = deserialize_secret_key(secret_key)?;
|
|
||||||
Keypair::from_secret_key(&secret_key)
|
|
||||||
} else {
|
|
||||||
Keypair::random()
|
|
||||||
};
|
|
||||||
|
|
||||||
let storage = {
|
|
||||||
let dir = if let Some(storage) = config_toml.storage {
|
|
||||||
storage
|
|
||||||
} else {
|
|
||||||
let path = dirs_next::data_dir().ok_or_else(|| {
|
|
||||||
anyhow!("operating environment provides no directory for application data")
|
|
||||||
})?;
|
|
||||||
path.join(DEFAULT_STORAGE_DIR)
|
|
||||||
};
|
|
||||||
|
|
||||||
dir.join("homeserver")
|
|
||||||
};
|
|
||||||
|
|
||||||
let config = Config {
|
|
||||||
testnet: config_toml.testnet.unwrap_or(false),
|
|
||||||
port: config_toml.port,
|
|
||||||
bootstrap: config_toml.bootstrap,
|
|
||||||
domain: config_toml.domain,
|
|
||||||
keypair,
|
|
||||||
storage,
|
|
||||||
dht_request_timeout: config_toml.dht_request_timeout,
|
|
||||||
default_list_limit: config_toml.default_list_limit.unwrap_or(DEFAULT_LIST_LIMIT),
|
|
||||||
max_list_limit: config_toml
|
|
||||||
.max_list_limit
|
|
||||||
.unwrap_or(DEFAULT_MAX_LIST_LIMIT),
|
|
||||||
};
|
|
||||||
|
|
||||||
if config.testnet {
|
|
||||||
let testnet_config = Config::testnet();
|
|
||||||
|
|
||||||
return Ok(Config {
|
|
||||||
bootstrap: testnet_config.bootstrap,
|
|
||||||
..config
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(config)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Load the config from a file.
|
|
||||||
pub async fn load(path: impl AsRef<Path>) -> Result<Config> {
|
|
||||||
let s = tokio::fs::read_to_string(path.as_ref())
|
|
||||||
.await
|
|
||||||
.with_context(|| format!("failed to read {}", path.as_ref().to_string_lossy()))?;
|
|
||||||
|
|
||||||
Config::try_from_str(&s)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Testnet configurations
|
|
||||||
pub fn testnet() -> Self {
|
|
||||||
let testnet = pkarr::mainline::Testnet::new(10);
|
|
||||||
info!(?testnet.bootstrap, "Testnet bootstrap nodes");
|
|
||||||
|
|
||||||
let bootstrap = Some(testnet.bootstrap.to_owned());
|
|
||||||
let storage = std::env::temp_dir()
|
|
||||||
.join(Timestamp::now().to_string())
|
|
||||||
.join(DEFAULT_STORAGE_DIR);
|
|
||||||
|
|
||||||
Self {
|
|
||||||
bootstrap,
|
|
||||||
storage,
|
|
||||||
port: Some(15411),
|
|
||||||
dht_request_timeout: Some(Duration::from_millis(10)),
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test configurations
|
|
||||||
pub fn test(testnet: &pkarr::mainline::Testnet) -> Self {
|
|
||||||
let bootstrap = Some(testnet.bootstrap.to_owned());
|
|
||||||
let storage = std::env::temp_dir()
|
|
||||||
.join(Timestamp::now().to_string())
|
|
||||||
.join(DEFAULT_STORAGE_DIR);
|
|
||||||
|
|
||||||
Self {
|
|
||||||
bootstrap,
|
|
||||||
storage,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn port(&self) -> u16 {
|
|
||||||
self.port.unwrap_or(DEFAULT_HOMESERVER_PORT)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn bootstsrap(&self) -> Option<Vec<String>> {
|
|
||||||
self.bootstrap.to_owned()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn domain(&self) -> &Option<String> {
|
|
||||||
&self.domain
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn keypair(&self) -> &Keypair {
|
|
||||||
&self.keypair
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn default_list_limit(&self) -> u16 {
|
|
||||||
self.default_list_limit
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn max_list_limit(&self) -> u16 {
|
|
||||||
self.max_list_limit
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the path to the storage directory
|
|
||||||
pub fn storage(&self) -> &PathBuf {
|
|
||||||
&self.storage
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn dht_request_timeout(&self) -> Option<Duration> {
|
|
||||||
self.dht_request_timeout
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Config {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
testnet: false,
|
|
||||||
port: Some(0),
|
|
||||||
bootstrap: None,
|
|
||||||
domain: None,
|
|
||||||
storage: storage(None)
|
|
||||||
.expect("operating environment provides no directory for application data"),
|
|
||||||
keypair: Keypair::random(),
|
|
||||||
dht_request_timeout: None,
|
|
||||||
default_list_limit: DEFAULT_LIST_LIMIT,
|
|
||||||
max_list_limit: DEFAULT_MAX_LIST_LIMIT,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn deserialize_secret_key(s: String) -> anyhow::Result<[u8; 32]> {
|
|
||||||
let bytes =
|
|
||||||
hex::decode(s).map_err(|_| anyhow!("secret_key in config.toml should be hex encoded"))?;
|
|
||||||
|
|
||||||
if bytes.len() != 32 {
|
|
||||||
return Err(anyhow!(format!(
|
|
||||||
"secret_key in config.toml should be 32 bytes in hex (64 characters), got: {}",
|
|
||||||
bytes.len()
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut arr = [0u8; 32];
|
|
||||||
arr.copy_from_slice(&bytes);
|
|
||||||
|
|
||||||
Ok(arr)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn storage(storage: Option<String>) -> Result<PathBuf> {
|
|
||||||
let dir = if let Some(storage) = storage {
|
|
||||||
PathBuf::from(storage)
|
|
||||||
} else {
|
|
||||||
let path = dirs_next::data_dir().ok_or_else(|| {
|
|
||||||
anyhow!("operating environment provides no directory for application data")
|
|
||||||
})?;
|
|
||||||
path.join(DEFAULT_STORAGE_DIR)
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(dir.join("homeserver"))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn parse_empty() {
|
|
||||||
Config::try_from_str("").unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
# Use testnet network (local DHT) for testing.
|
|
||||||
testnet = true
|
|
||||||
# Secret key (in hex) to generate the Homeserver's Keypair
|
|
||||||
secret_key = "0000000000000000000000000000000000000000000000000000000000000000"
|
|
||||||
# Domain to be published in Pkarr records for this server to be accessible by.
|
|
||||||
domain = "localhost"
|
|
||||||
# Port for the Homeserver to listen on.
|
|
||||||
port = 6287
|
|
||||||
# Storage directory. Defaults to <System's Data Directory>
|
|
||||||
# storage = ""
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
use std::fs;
|
|
||||||
|
|
||||||
use heed::{Env, EnvOpenOptions};
|
|
||||||
|
|
||||||
mod migrations;
|
|
||||||
pub mod tables;
|
|
||||||
|
|
||||||
use crate::config::Config;
|
|
||||||
|
|
||||||
use tables::{Tables, TABLES_COUNT};
|
|
||||||
|
|
||||||
pub const DEFAULT_MAP_SIZE: usize = 10995116277760; // 10TB (not = disk-space used)
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct DB {
|
|
||||||
pub(crate) env: Env,
|
|
||||||
pub(crate) tables: Tables,
|
|
||||||
pub(crate) config: Config,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DB {
|
|
||||||
pub fn open(config: Config) -> anyhow::Result<Self> {
|
|
||||||
fs::create_dir_all(config.storage())?;
|
|
||||||
|
|
||||||
let env = unsafe {
|
|
||||||
EnvOpenOptions::new()
|
|
||||||
.max_dbs(TABLES_COUNT)
|
|
||||||
// TODO: Add a configuration option?
|
|
||||||
.map_size(DEFAULT_MAP_SIZE)
|
|
||||||
.open(config.storage())
|
|
||||||
}?;
|
|
||||||
|
|
||||||
let tables = migrations::run(&env)?;
|
|
||||||
|
|
||||||
let db = DB {
|
|
||||||
env,
|
|
||||||
tables,
|
|
||||||
config,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(db)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use bytes::Bytes;
|
|
||||||
use pkarr::{mainline::Testnet, Keypair};
|
|
||||||
|
|
||||||
use crate::config::Config;
|
|
||||||
|
|
||||||
use super::DB;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn entries() {
|
|
||||||
let db = DB::open(Config::test(&Testnet::new(0))).unwrap();
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
let path = "/pub/foo.txt";
|
|
||||||
|
|
||||||
let (tx, rx) = flume::bounded::<Bytes>(0);
|
|
||||||
|
|
||||||
let mut cloned = db.clone();
|
|
||||||
let cloned_keypair = keypair.clone();
|
|
||||||
|
|
||||||
let done = tokio::task::spawn_blocking(move || {
|
|
||||||
cloned
|
|
||||||
.put_entry(&cloned_keypair.public_key(), path, rx)
|
|
||||||
.unwrap();
|
|
||||||
});
|
|
||||||
|
|
||||||
tx.send(vec![1, 2, 3, 4, 5].into()).unwrap();
|
|
||||||
drop(tx);
|
|
||||||
|
|
||||||
done.await.unwrap();
|
|
||||||
|
|
||||||
let blob = db.get_blob(&keypair.public_key(), path).unwrap().unwrap();
|
|
||||||
|
|
||||||
assert_eq!(blob, Bytes::from(vec![1, 2, 3, 4, 5]));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
use heed::Env;
|
|
||||||
|
|
||||||
mod m0;
|
|
||||||
|
|
||||||
use super::tables::Tables;
|
|
||||||
|
|
||||||
pub fn run(env: &Env) -> anyhow::Result<Tables> {
|
|
||||||
let mut wtxn = env.write_txn()?;
|
|
||||||
|
|
||||||
m0::run(env, &mut wtxn)?;
|
|
||||||
|
|
||||||
let tables = Tables::new(env, &mut wtxn)?;
|
|
||||||
|
|
||||||
wtxn.commit()?;
|
|
||||||
|
|
||||||
Ok(tables)
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
use heed::{Env, RwTxn};
|
|
||||||
|
|
||||||
use crate::database::tables::{blobs, entries, events, sessions, users};
|
|
||||||
|
|
||||||
pub fn run(env: &Env, wtxn: &mut RwTxn) -> anyhow::Result<()> {
|
|
||||||
let _: users::UsersTable = env.create_database(wtxn, Some(users::USERS_TABLE))?;
|
|
||||||
|
|
||||||
let _: sessions::SessionsTable = env.create_database(wtxn, Some(sessions::SESSIONS_TABLE))?;
|
|
||||||
|
|
||||||
let _: blobs::BlobsTable = env.create_database(wtxn, Some(blobs::BLOBS_TABLE))?;
|
|
||||||
|
|
||||||
let _: entries::EntriesTable = env.create_database(wtxn, Some(entries::ENTRIES_TABLE))?;
|
|
||||||
|
|
||||||
let _: events::EventsTable = env.create_database(wtxn, Some(events::EVENTS_TABLE))?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
pub mod blobs;
|
|
||||||
pub mod entries;
|
|
||||||
pub mod events;
|
|
||||||
pub mod sessions;
|
|
||||||
pub mod users;
|
|
||||||
|
|
||||||
use heed::{Env, RwTxn};
|
|
||||||
|
|
||||||
use blobs::{BlobsTable, BLOBS_TABLE};
|
|
||||||
use entries::{EntriesTable, ENTRIES_TABLE};
|
|
||||||
|
|
||||||
use self::{
|
|
||||||
events::{EventsTable, EVENTS_TABLE},
|
|
||||||
sessions::{SessionsTable, SESSIONS_TABLE},
|
|
||||||
users::{UsersTable, USERS_TABLE},
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const TABLES_COUNT: u32 = 5;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Tables {
|
|
||||||
pub users: UsersTable,
|
|
||||||
pub sessions: SessionsTable,
|
|
||||||
pub blobs: BlobsTable,
|
|
||||||
pub entries: EntriesTable,
|
|
||||||
pub events: EventsTable,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Tables {
|
|
||||||
pub fn new(env: &Env, wtxn: &mut RwTxn) -> anyhow::Result<Self> {
|
|
||||||
Ok(Self {
|
|
||||||
users: env
|
|
||||||
.open_database(wtxn, Some(USERS_TABLE))?
|
|
||||||
.expect("Users table already created"),
|
|
||||||
sessions: env
|
|
||||||
.open_database(wtxn, Some(SESSIONS_TABLE))?
|
|
||||||
.expect("Sessions table already created"),
|
|
||||||
blobs: env
|
|
||||||
.open_database(wtxn, Some(BLOBS_TABLE))?
|
|
||||||
.expect("Blobs table already created"),
|
|
||||||
entries: env
|
|
||||||
.open_database(wtxn, Some(ENTRIES_TABLE))?
|
|
||||||
.expect("Entries table already created"),
|
|
||||||
events: env
|
|
||||||
.open_database(wtxn, Some(EVENTS_TABLE))?
|
|
||||||
.expect("Events table already created"),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
use heed::{types::Bytes, Database};
|
|
||||||
use pkarr::PublicKey;
|
|
||||||
|
|
||||||
use crate::database::DB;
|
|
||||||
|
|
||||||
use super::entries::Entry;
|
|
||||||
|
|
||||||
/// hash of the blob => an 8-byte big-endian reference count followed by the blob bytes.
|
|
||||||
pub type BlobsTable = Database<Bytes, Bytes>;
|
|
||||||
|
|
||||||
pub const BLOBS_TABLE: &str = "blobs";
|
|
||||||
|
|
||||||
impl DB {
|
|
||||||
pub fn get_blob(
|
|
||||||
&self,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
path: &str,
|
|
||||||
) -> anyhow::Result<Option<bytes::Bytes>> {
|
|
||||||
let rtxn = self.env.read_txn()?;
|
|
||||||
|
|
||||||
let key = format!("{public_key}/{path}");
|
|
||||||
|
|
||||||
let result = if let Some(bytes) = self.tables.entries.get(&rtxn, &key)? {
|
|
||||||
let entry = Entry::deserialize(bytes)?;
|
|
||||||
|
|
||||||
self.tables
|
|
||||||
.blobs
|
|
||||||
.get(&rtxn, entry.content_hash())?
|
|
||||||
.map(|blob| bytes::Bytes::from(blob[8..].to_vec()))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
rtxn.commit()?;
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,315 +0,0 @@
|
|||||||
use pkarr::PublicKey;
|
|
||||||
use postcard::{from_bytes, to_allocvec};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use tracing::instrument;
|
|
||||||
|
|
||||||
use heed::{
|
|
||||||
types::{Bytes, Str},
|
|
||||||
Database, RoTxn,
|
|
||||||
};
|
|
||||||
|
|
||||||
use pubky_common::{
|
|
||||||
crypto::{Hash, Hasher},
|
|
||||||
timestamp::Timestamp,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::database::DB;
|
|
||||||
|
|
||||||
use super::events::Event;
|
|
||||||
|
|
||||||
/// full_path(pubky/*path) => Entry.
|
|
||||||
pub type EntriesTable = Database<Str, Bytes>;
|
|
||||||
|
|
||||||
pub const ENTRIES_TABLE: &str = "entries";
|
|
||||||
|
|
||||||
impl DB {
|
|
||||||
pub fn put_entry(
|
|
||||||
&mut self,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
path: &str,
|
|
||||||
rx: flume::Receiver<bytes::Bytes>,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let mut wtxn = self.env.write_txn()?;
|
|
||||||
|
|
||||||
let mut hasher = Hasher::new();
|
|
||||||
let mut bytes = vec![];
|
|
||||||
let mut length = 0;
|
|
||||||
|
|
||||||
while let Ok(chunk) = rx.recv() {
|
|
||||||
hasher.update(&chunk);
|
|
||||||
bytes.extend_from_slice(&chunk);
|
|
||||||
length += chunk.len();
|
|
||||||
}
|
|
||||||
|
|
||||||
let hash = hasher.finalize();
|
|
||||||
|
|
||||||
let key = hash.as_bytes();
|
|
||||||
|
|
||||||
let mut bytes_with_ref_count = Vec::with_capacity(bytes.len() + 8);
|
|
||||||
bytes_with_ref_count.extend_from_slice(&u64::to_be_bytes(0));
|
|
||||||
bytes_with_ref_count.extend_from_slice(&bytes);
|
|
||||||
|
|
||||||
// TODO: For now, we set the first 8 bytes to a reference counter
|
|
||||||
let exists = self
|
|
||||||
.tables
|
|
||||||
.blobs
|
|
||||||
.get(&wtxn, key)?
|
|
||||||
.unwrap_or(bytes_with_ref_count.as_slice());
|
|
||||||
|
|
||||||
let new_count = u64::from_be_bytes(exists[0..8].try_into().unwrap()) + 1;
|
|
||||||
|
|
||||||
bytes_with_ref_count[0..8].copy_from_slice(&u64::to_be_bytes(new_count));
|
|
||||||
|
|
||||||
self.tables
|
|
||||||
.blobs
|
|
||||||
.put(&mut wtxn, hash.as_bytes(), &bytes_with_ref_count)?;
|
|
||||||
|
|
||||||
let mut entry = Entry::new();
|
|
||||||
|
|
||||||
entry.set_content_hash(hash);
|
|
||||||
entry.set_content_length(length);
|
|
||||||
|
|
||||||
let key = format!("{public_key}/{path}");
|
|
||||||
|
|
||||||
self.tables
|
|
||||||
.entries
|
|
||||||
.put(&mut wtxn, &key, &entry.serialize())?;
|
|
||||||
|
|
||||||
if path.starts_with("pub/") {
|
|
||||||
let url = format!("pubky://{key}");
|
|
||||||
let event = Event::put(&url);
|
|
||||||
let value = event.serialize();
|
|
||||||
|
|
||||||
let key = entry.timestamp.to_string();
|
|
||||||
|
|
||||||
self.tables.events.put(&mut wtxn, &key, &value)?;
|
|
||||||
|
|
||||||
// TODO: delete older events.
|
|
||||||
// TODO: move to events.rs
|
|
||||||
}
|
|
||||||
|
|
||||||
wtxn.commit()?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn delete_entry(&mut self, public_key: &PublicKey, path: &str) -> anyhow::Result<bool> {
|
|
||||||
let mut wtxn = self.env.write_txn()?;
|
|
||||||
|
|
||||||
let key = format!("{public_key}/{path}");
|
|
||||||
|
|
||||||
let deleted = if let Some(bytes) = self.tables.entries.get(&wtxn, &key)? {
|
|
||||||
let entry = Entry::deserialize(bytes)?;
|
|
||||||
|
|
||||||
let mut bytes_with_ref_count = self
|
|
||||||
.tables
|
|
||||||
.blobs
|
|
||||||
.get(&wtxn, entry.content_hash())?
|
|
||||||
.map_or(vec![], |s| s.to_vec());
|
|
||||||
|
|
||||||
let arr: [u8; 8] = bytes_with_ref_count[0..8].try_into().unwrap_or([0; 8]);
|
|
||||||
let reference_count = u64::from_be_bytes(arr);
|
|
||||||
|
|
||||||
let deleted_blobs = if reference_count > 1 {
|
|
||||||
// decrement reference count
|
|
||||||
|
|
||||||
bytes_with_ref_count[0..8].copy_from_slice(&(reference_count - 1).to_be_bytes());
|
|
||||||
|
|
||||||
self.tables
|
|
||||||
.blobs
|
|
||||||
.put(&mut wtxn, entry.content_hash(), &bytes_with_ref_count)?;
|
|
||||||
|
|
||||||
true
|
|
||||||
} else {
|
|
||||||
self.tables.blobs.delete(&mut wtxn, entry.content_hash())?
|
|
||||||
};
|
|
||||||
|
|
||||||
let deleted_entry = self.tables.entries.delete(&mut wtxn, &key)?;
|
|
||||||
|
|
||||||
// create DELETE event
|
|
||||||
if path.starts_with("pub/") {
|
|
||||||
let url = format!("pubky://{key}");
|
|
||||||
|
|
||||||
let event = Event::delete(&url);
|
|
||||||
let value = event.serialize();
|
|
||||||
|
|
||||||
let key = Timestamp::now().to_string();
|
|
||||||
|
|
||||||
self.tables.events.put(&mut wtxn, &key, &value)?;
|
|
||||||
|
|
||||||
// TODO: delete older events.
|
|
||||||
// TODO: move to events.rs
|
|
||||||
}
|
|
||||||
|
|
||||||
deleted_entry && deleted_blobs
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
};
|
|
||||||
|
|
||||||
wtxn.commit()?;
|
|
||||||
|
|
||||||
Ok(deleted)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn contains_directory(&self, txn: &RoTxn, path: &str) -> anyhow::Result<bool> {
|
|
||||||
Ok(self.tables.entries.get_greater_than(txn, path)?.is_some())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a list of pubky urls.
|
|
||||||
///
|
|
||||||
/// - limit defaults to [Config::default_list_limit] and is capped by [Config::max_list_limit]
|
|
||||||
pub fn list(
|
|
||||||
&self,
|
|
||||||
txn: &RoTxn,
|
|
||||||
path: &str,
|
|
||||||
reverse: bool,
|
|
||||||
limit: Option<u16>,
|
|
||||||
cursor: Option<String>,
|
|
||||||
shallow: bool,
|
|
||||||
) -> anyhow::Result<Vec<String>> {
|
|
||||||
// Vector to store results
|
|
||||||
let mut results = Vec::new();
|
|
||||||
|
|
||||||
let limit = limit
|
|
||||||
.unwrap_or(self.config.default_list_limit())
|
|
||||||
.min(self.config.max_list_limit());
|
|
||||||
|
|
||||||
// TODO: make this more performant than split and allocations?
|
|
||||||
|
|
||||||
let mut threshold = cursor
|
|
||||||
.map(|cursor| {
|
|
||||||
// Removing leading forward slashes
|
|
||||||
let mut file_or_directory = cursor.trim_start_matches('/');
|
|
||||||
|
|
||||||
if cursor.starts_with("pubky://") {
|
|
||||||
file_or_directory = cursor.split(path).last().expect("should not be reachable")
|
|
||||||
};
|
|
||||||
|
|
||||||
next_threshold(
|
|
||||||
path,
|
|
||||||
file_or_directory,
|
|
||||||
file_or_directory.ends_with('/'),
|
|
||||||
reverse,
|
|
||||||
shallow,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.unwrap_or(next_threshold(path, "", false, reverse, shallow));
|
|
||||||
|
|
||||||
for _ in 0..limit {
|
|
||||||
if let Some((key, _)) = if reverse {
|
|
||||||
self.tables.entries.get_lower_than(txn, &threshold)?
|
|
||||||
} else {
|
|
||||||
self.tables.entries.get_greater_than(txn, &threshold)?
|
|
||||||
} {
|
|
||||||
if !key.starts_with(path) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if shallow {
|
|
||||||
let mut split = key[path.len()..].split('/');
|
|
||||||
let file_or_directory = split.next().expect("should not be reachable");
|
|
||||||
|
|
||||||
let is_directory = split.next().is_some();
|
|
||||||
|
|
||||||
threshold =
|
|
||||||
next_threshold(path, file_or_directory, is_directory, reverse, shallow);
|
|
||||||
|
|
||||||
results.push(format!(
|
|
||||||
"pubky://{path}{file_or_directory}{}",
|
|
||||||
if is_directory { "/" } else { "" }
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
threshold = key.to_string();
|
|
||||||
results.push(format!("pubky://{}", key))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(results)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Calculate the next threshold
|
|
||||||
#[instrument]
|
|
||||||
fn next_threshold(
|
|
||||||
path: &str,
|
|
||||||
file_or_directory: &str,
|
|
||||||
is_directory: bool,
|
|
||||||
reverse: bool,
|
|
||||||
shallow: bool,
|
|
||||||
) -> String {
|
|
||||||
format!(
|
|
||||||
"{path}{file_or_directory}{}",
|
|
||||||
if file_or_directory.is_empty() {
|
|
||||||
// No file_or_directory, early return
|
|
||||||
if reverse {
|
|
||||||
// `path/to/dir/\x7f` to catch all paths under `path/to/dir/`
|
|
||||||
"\x7f"
|
|
||||||
} else {
|
|
||||||
""
|
|
||||||
}
|
|
||||||
} else if shallow & is_directory {
|
|
||||||
if reverse {
|
|
||||||
// threshold = `path/to/dir\x2e`, since `\x2e` is lower than `/`
|
|
||||||
"\x2e"
|
|
||||||
} else {
|
|
||||||
// threshold = `path/to/dir\x7f`, since `\x7f` is greater than `/`
|
|
||||||
"\x7f"
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
""
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
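// A rough walk-through of the shallow, forward case (hypothetical keys, for
// illustration only). Listing path = "alice/pub/" over the entries
// "alice/pub/a.txt", "alice/pub/dir/x", "alice/pub/dir/y":
//
//   threshold starts as "alice/pub/"
//   -> "alice/pub/a.txt" is found: push "pubky://alice/pub/a.txt",
//      threshold becomes "alice/pub/a.txt"
//   -> "alice/pub/dir/x" is found: it is a directory entry, so push
//      "pubky://alice/pub/dir/" and set threshold to "alice/pub/dir\x7f",
//      which skips "alice/pub/dir/y" entirely because '\x7f' sorts after '/'.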
|
|
||||||
|
|
||||||
#[derive(Clone, Default, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
|
||||||
pub struct Entry {
|
|
||||||
/// Encoding version
|
|
||||||
version: usize,
|
|
||||||
/// Modified at
|
|
||||||
timestamp: Timestamp,
|
|
||||||
content_hash: [u8; 32],
|
|
||||||
content_length: usize,
|
|
||||||
content_type: String,
|
|
||||||
// user_metadata: ?
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: get headers like Etag
|
|
||||||
|
|
||||||
impl Entry {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Default::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Setters ===
|
|
||||||
|
|
||||||
pub fn set_content_hash(&mut self, content_hash: Hash) -> &mut Self {
|
|
||||||
content_hash.as_bytes().clone_into(&mut self.content_hash);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_content_length(&mut self, content_length: usize) -> &mut Self {
|
|
||||||
self.content_length = content_length;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Getters ===
|
|
||||||
|
|
||||||
pub fn content_hash(&self) -> &[u8; 32] {
|
|
||||||
&self.content_hash
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Public Method ===
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
to_allocvec(self).expect("Session::serialize")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize(bytes: &[u8]) -> core::result::Result<Self, postcard::Error> {
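// With postcard's in-order field encoding, the first byte is the `version`
// field; anything other than 0 is an unknown encoding.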
|
|
||||||
if bytes[0] > 0 {
|
|
||||||
panic!("Unknown Entry version");
|
|
||||||
}
|
|
||||||
|
|
||||||
from_bytes(bytes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,105 +0,0 @@
//! Server events (Put and Delete entries)
|
|
||||||
//!
|
|
||||||
//! Useful as a realtime sync with Indexers until
|
|
||||||
//! we implement more self-authenticated merkle data.
|
|
||||||
|
|
||||||
use heed::{
|
|
||||||
types::{Bytes, Str},
|
|
||||||
Database,
|
|
||||||
};
|
|
||||||
use postcard::{from_bytes, to_allocvec};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use crate::database::DB;
|
|
||||||
|
|
||||||
/// Event [Timestamp] base32 => Encoded event.
|
|
||||||
pub type EventsTable = Database<Str, Bytes>;
|
|
||||||
|
|
||||||
pub const EVENTS_TABLE: &str = "events";
|
|
||||||
|
|
||||||
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
|
||||||
pub enum Event {
|
|
||||||
Put(String),
|
|
||||||
Delete(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Event {
|
|
||||||
pub fn put(url: &str) -> Self {
|
|
||||||
Self::Put(url.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn delete(url: &str) -> Self {
|
|
||||||
Self::Delete(url.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
to_allocvec(self).expect("Session::serialize")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize(bytes: &[u8]) -> core::result::Result<Self, postcard::Error> {
|
|
||||||
if bytes[0] > 1 {
|
|
||||||
panic!("Unknown Event version");
|
|
||||||
}
|
|
||||||
|
|
||||||
from_bytes(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn url(&self) -> &str {
|
|
||||||
match self {
|
|
||||||
Event::Put(url) => url,
|
|
||||||
Event::Delete(url) => url,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn operation(&self) -> &str {
|
|
||||||
match self {
|
|
||||||
Event::Put(_) => "PUT",
|
|
||||||
Event::Delete(_) => "DEL",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DB {
|
|
||||||
/// Returns a list of events formatted as `<OP> <url>`.
|
|
||||||
///
|
|
||||||
/// - limit defaults to [Config::default_list_limit] and capped by [Config::max_list_limit]
|
|
||||||
/// - cursor is a 13 character string encoding of a timestamp
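///
/// Each line has the form `<OP> <url>` (`PUT` or `DEL`), and a final
/// `cursor: <13-character timestamp>` line is appended when the result is
/// not empty, to be used as the cursor of the next request.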
|
|
||||||
pub fn list_events(
|
|
||||||
&self,
|
|
||||||
limit: Option<u16>,
|
|
||||||
cursor: Option<String>,
|
|
||||||
) -> anyhow::Result<Vec<String>> {
|
|
||||||
let txn = self.env.read_txn()?;
|
|
||||||
|
|
||||||
let limit = limit
|
|
||||||
.unwrap_or(self.config.default_list_limit())
|
|
||||||
.min(self.config.max_list_limit());
|
|
||||||
|
|
||||||
let cursor = cursor.unwrap_or("0000000000000".to_string());
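// An all-zeros cursor is the lowest possible 13-character timestamp, so the
// scan starts from the oldest stored event.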
|
|
||||||
|
|
||||||
let mut result: Vec<String> = vec![];
|
|
||||||
let mut next_cursor = cursor.to_string();
|
|
||||||
|
|
||||||
for _ in 0..limit {
|
|
||||||
match self.tables.events.get_greater_than(&txn, &next_cursor)? {
|
|
||||||
Some((timestamp, event_bytes)) => {
|
|
||||||
let event = Event::deserialize(event_bytes)?;
|
|
||||||
|
|
||||||
let line = format!("{} {}", event.operation(), event.url());
|
|
||||||
next_cursor = timestamp.to_string();
|
|
||||||
|
|
||||||
result.push(line);
|
|
||||||
}
|
|
||||||
None => break,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if !result.is_empty() {
|
|
||||||
result.push(format!("cursor: {next_cursor}"))
|
|
||||||
}
|
|
||||||
|
|
||||||
txn.commit()?;
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,51 +0,0 @@
use heed::{
|
|
||||||
types::{Bytes, Str},
|
|
||||||
Database,
|
|
||||||
};
|
|
||||||
use pkarr::PublicKey;
|
|
||||||
use pubky_common::session::Session;
|
|
||||||
use tower_cookies::Cookies;
|
|
||||||
|
|
||||||
use crate::database::DB;
|
|
||||||
|
|
||||||
/// session secret => Session.
|
|
||||||
pub type SessionsTable = Database<Str, Bytes>;
|
|
||||||
|
|
||||||
pub const SESSIONS_TABLE: &str = "sessions";
|
|
||||||
|
|
||||||
impl DB {
|
|
||||||
pub fn get_session(
|
|
||||||
&mut self,
|
|
||||||
cookies: Cookies,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
) -> anyhow::Result<Option<Session>> {
|
|
||||||
if let Some(bytes) = self.get_session_bytes(cookies, public_key)? {
|
|
||||||
return Ok(Some(Session::deserialize(&bytes)?));
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_session_bytes(
|
|
||||||
&mut self,
|
|
||||||
cookies: Cookies,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
) -> anyhow::Result<Option<Vec<u8>>> {
|
|
||||||
if let Some(cookie) = cookies.get(&public_key.to_string()) {
|
|
||||||
let rtxn = self.env.read_txn()?;
|
|
||||||
|
|
||||||
let sessions: SessionsTable = self
|
|
||||||
.env
|
|
||||||
.open_database(&rtxn, Some(SESSIONS_TABLE))?
|
|
||||||
.expect("Session table already created");
|
|
||||||
|
|
||||||
let session = sessions.get(&rtxn, cookie.value())?.map(|s| s.to_vec());
|
|
||||||
|
|
||||||
rtxn.commit()?;
|
|
||||||
|
|
||||||
return Ok(session);
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,58 +0,0 @@
use std::borrow::Cow;
|
|
||||||
|
|
||||||
use postcard::{from_bytes, to_allocvec};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use heed::{BoxedError, BytesDecode, BytesEncode, Database};
|
|
||||||
use pkarr::PublicKey;
|
|
||||||
|
|
||||||
extern crate alloc;
|
|
||||||
|
|
||||||
/// PublicKey => User.
|
|
||||||
pub type UsersTable = Database<PublicKeyCodec, User>;
|
|
||||||
|
|
||||||
pub const USERS_TABLE: &str = "users";
|
|
||||||
|
|
||||||
// TODO: add more administration metadata like quota, invitation links, etc.
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
|
||||||
pub struct User {
|
|
||||||
pub created_at: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> BytesEncode<'a> for User {
|
|
||||||
type EItem = Self;
|
|
||||||
|
|
||||||
fn bytes_encode(user: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
|
|
||||||
let vec = to_allocvec(user).unwrap();
|
|
||||||
|
|
||||||
Ok(Cow::Owned(vec))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> BytesDecode<'a> for User {
|
|
||||||
type DItem = Self;
|
|
||||||
|
|
||||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, BoxedError> {
|
|
||||||
let user: User = from_bytes(bytes).unwrap();
|
|
||||||
|
|
||||||
Ok(user)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct PublicKeyCodec {}
|
|
||||||
|
|
||||||
impl<'a> BytesEncode<'a> for PublicKeyCodec {
|
|
||||||
type EItem = PublicKey;
|
|
||||||
|
|
||||||
fn bytes_encode(pubky: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
|
|
||||||
Ok(Cow::Borrowed(pubky.as_bytes()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> BytesDecode<'a> for PublicKeyCodec {
|
|
||||||
type DItem = PublicKey;
|
|
||||||
|
|
||||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, BoxedError> {
|
|
||||||
Ok(PublicKey::try_from(bytes)?)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,128 +0,0 @@
//! Server error
|
|
||||||
|
|
||||||
use axum::{
|
|
||||||
extract::rejection::{ExtensionRejection, PathRejection, QueryRejection},
|
|
||||||
http::StatusCode,
|
|
||||||
response::IntoResponse,
|
|
||||||
};
|
|
||||||
use tracing::debug;
|
|
||||||
|
|
||||||
pub type Result<T, E = Error> = core::result::Result<T, E>;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Error {
|
|
||||||
// #[serde(with = "serde_status_code")]
|
|
||||||
status: StatusCode,
|
|
||||||
detail: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Error {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
status: StatusCode::INTERNAL_SERVER_ERROR,
|
|
||||||
detail: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Error {
|
|
||||||
pub fn with_status(status: StatusCode) -> Error {
|
|
||||||
Self {
|
|
||||||
status,
|
|
||||||
detail: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new [`Error`].
|
|
||||||
pub fn new(status_code: StatusCode, message: Option<impl ToString>) -> Error {
|
|
||||||
Self {
|
|
||||||
status: status_code,
|
|
||||||
detail: message.map(|m| m.to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl IntoResponse for Error {
|
|
||||||
fn into_response(self) -> axum::response::Response {
|
|
||||||
match self.detail {
|
|
||||||
Some(detail) => (self.status, detail).into_response(),
|
|
||||||
_ => (self.status,).into_response(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<QueryRejection> for Error {
|
|
||||||
fn from(error: QueryRejection) -> Self {
|
|
||||||
Self::new(StatusCode::BAD_REQUEST, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<ExtensionRejection> for Error {
|
|
||||||
fn from(error: ExtensionRejection) -> Self {
|
|
||||||
Self::new(StatusCode::BAD_REQUEST, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<PathRejection> for Error {
|
|
||||||
fn from(error: PathRejection) -> Self {
|
|
||||||
Self::new(StatusCode::BAD_REQUEST, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Pubky specific errors ===
|
|
||||||
|
|
||||||
impl From<pubky_common::auth::Error> for Error {
|
|
||||||
fn from(error: pubky_common::auth::Error) -> Self {
|
|
||||||
Self::new(StatusCode::BAD_REQUEST, Some(error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<pkarr::Error> for Error {
|
|
||||||
fn from(error: pkarr::Error) -> Self {
|
|
||||||
Self::new(StatusCode::BAD_REQUEST, Some(error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// === INTERNAL_SERVER_ERROR ===
|
|
||||||
|
|
||||||
impl From<std::io::Error> for Error {
|
|
||||||
fn from(error: std::io::Error) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<heed::Error> for Error {
|
|
||||||
fn from(error: heed::Error) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<anyhow::Error> for Error {
|
|
||||||
fn from(error: anyhow::Error) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<postcard::Error> for Error {
|
|
||||||
fn from(error: postcard::Error) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<axum::Error> for Error {
|
|
||||||
fn from(error: axum::Error) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> From<flume::SendError<T>> for Error {
|
|
||||||
fn from(error: flume::SendError<T>) -> Self {
|
|
||||||
debug!(?error);
|
|
||||||
Self::new(StatusCode::INTERNAL_SERVER_ERROR, error.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,123 +0,0 @@
use std::collections::HashMap;
|
|
||||||
|
|
||||||
use axum::{
|
|
||||||
async_trait,
|
|
||||||
extract::{FromRequestParts, Path, Query},
|
|
||||||
http::{request::Parts, StatusCode},
|
|
||||||
response::{IntoResponse, Response},
|
|
||||||
RequestPartsExt,
|
|
||||||
};
|
|
||||||
|
|
||||||
use pkarr::PublicKey;
|
|
||||||
|
|
||||||
use crate::error::{Error, Result};
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Pubky(PublicKey);
|
|
||||||
|
|
||||||
impl Pubky {
|
|
||||||
pub fn public_key(&self) -> &PublicKey {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl<S> FromRequestParts<S> for Pubky
|
|
||||||
where
|
|
||||||
S: Send + Sync,
|
|
||||||
{
|
|
||||||
type Rejection = Response;
|
|
||||||
|
|
||||||
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
|
|
||||||
let params: Path<HashMap<String, String>> =
|
|
||||||
parts.extract().await.map_err(IntoResponse::into_response)?;
|
|
||||||
|
|
||||||
let pubky_id = params
|
|
||||||
.get("pubky")
|
|
||||||
.ok_or_else(|| (StatusCode::NOT_FOUND, "pubky param missing").into_response())?;
|
|
||||||
|
|
||||||
let public_key = PublicKey::try_from(pubky_id.to_string())
|
|
||||||
.map_err(Error::try_from)
|
|
||||||
.map_err(IntoResponse::into_response)?;
|
|
||||||
|
|
||||||
// TODO: return 404 if the user doesn't exist, but exclude signups.
|
|
||||||
|
|
||||||
Ok(Pubky(public_key))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct EntryPath(pub(crate) String);
|
|
||||||
|
|
||||||
impl EntryPath {
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self.0.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl<S> FromRequestParts<S> for EntryPath
|
|
||||||
where
|
|
||||||
S: Send + Sync,
|
|
||||||
{
|
|
||||||
type Rejection = Response;
|
|
||||||
|
|
||||||
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
|
|
||||||
let params: Path<HashMap<String, String>> =
|
|
||||||
parts.extract().await.map_err(IntoResponse::into_response)?;
|
|
||||||
|
|
||||||
// TODO: enforce path limits like no trailing '/'
|
|
||||||
|
|
||||||
let path = params
|
|
||||||
.get("path")
|
|
||||||
.ok_or_else(|| (StatusCode::NOT_FOUND, "entry path missing").into_response())?;
|
|
||||||
|
|
||||||
Ok(EntryPath(path.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ListQueryParams {
|
|
||||||
pub limit: Option<u16>,
|
|
||||||
pub cursor: Option<String>,
|
|
||||||
pub reverse: bool,
|
|
||||||
pub shallow: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl<S> FromRequestParts<S> for ListQueryParams
|
|
||||||
where
|
|
||||||
S: Send + Sync,
|
|
||||||
{
|
|
||||||
type Rejection = Response;
|
|
||||||
|
|
||||||
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
|
|
||||||
let params: Query<HashMap<String, String>> =
|
|
||||||
parts.extract().await.map_err(IntoResponse::into_response)?;
|
|
||||||
|
|
||||||
let reverse = params.contains_key("reverse");
|
|
||||||
let shallow = params.contains_key("shallow");
|
|
||||||
let limit = params
|
|
||||||
.get("limit")
|
|
||||||
// Treat `limit=` as None
|
|
||||||
.and_then(|l| if l.is_empty() { None } else { Some(l) })
|
|
||||||
.and_then(|l| l.parse::<u16>().ok());
|
|
||||||
let cursor = params
|
|
||||||
.get("cursor")
|
|
||||||
.map(|c| c.as_str())
|
|
||||||
// Treat `cursor=` as None
|
|
||||||
.and_then(|c| {
|
|
||||||
if c.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(c.to_string())
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(ListQueryParams {
|
|
||||||
reverse,
|
|
||||||
shallow,
|
|
||||||
limit,
|
|
||||||
cursor,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
pub mod config;
|
|
||||||
mod database;
|
|
||||||
mod error;
|
|
||||||
mod extractors;
|
|
||||||
mod pkarr;
|
|
||||||
mod routes;
|
|
||||||
mod server;
|
|
||||||
|
|
||||||
pub use server::Homeserver;
|
|
||||||
@@ -1,46 +0,0 @@
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use pubky_homeserver::{config::Config, Homeserver};
|
|
||||||
|
|
||||||
use clap::Parser;
|
|
||||||
|
|
||||||
#[derive(Parser, Debug)]
|
|
||||||
struct Cli {
|
|
||||||
/// [tracing_subscriber::EnvFilter]
|
|
||||||
#[clap(short, long)]
|
|
||||||
tracing_env_filter: Option<String>,
|
|
||||||
|
|
||||||
/// Run Homeserver in a local testnet
|
|
||||||
#[clap(long)]
|
|
||||||
testnet: bool,
|
|
||||||
|
|
||||||
/// Optional Path to config file.
|
|
||||||
#[clap(short, long)]
|
|
||||||
config: Option<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<()> {
|
|
||||||
let args = Cli::parse();
|
|
||||||
|
|
||||||
tracing_subscriber::fmt()
|
|
||||||
.with_env_filter(
|
|
||||||
args.tracing_env_filter
|
|
||||||
.unwrap_or("pubky_homeserver=debug,tower_http=debug".to_string()),
|
|
||||||
)
|
|
||||||
.init();
|
|
||||||
|
|
||||||
let server = Homeserver::start(if args.testnet {
|
|
||||||
Config::testnet()
|
|
||||||
} else if let Some(config_path) = args.config {
|
|
||||||
Config::load(config_path).await?
|
|
||||||
} else {
|
|
||||||
Config::default()
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
server.run_until_done().await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,46 +0,0 @@
//! Pkarr related task
|
|
||||||
|
|
||||||
use pkarr::{
|
|
||||||
dns::{rdata::SVCB, Packet},
|
|
||||||
Keypair, PkarrClientAsync, SignedPacket,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub(crate) async fn publish_server_packet(
|
|
||||||
pkarr_client: &PkarrClientAsync,
|
|
||||||
keypair: &Keypair,
|
|
||||||
domain: &str,
|
|
||||||
port: u16,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
// TODO: Try to resolve first before publishing.
|
|
||||||
|
|
||||||
let mut packet = Packet::new_reply(0);
|
|
||||||
|
|
||||||
let mut svcb = SVCB::new(0, domain.try_into()?);
|
|
||||||
|
|
||||||
// Publishing port only for localhost domain,
|
|
||||||
// assuming any other domain will point to a reverse proxy
|
|
||||||
// at the conventional ports.
|
|
||||||
if domain == "localhost" {
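// A non-zero priority turns this SVCB record into ServiceMode, which is
// what allows attaching the port parameter below.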
|
|
||||||
svcb.priority = 1;
|
|
||||||
svcb.set_port(port);
|
|
||||||
|
|
||||||
// TODO: Add more parameters like the signer key!
|
|
||||||
// svcb.set_param(key, value)
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: announce A/AAAA records as well for Noise connections?
|
|
||||||
// Or maybe Iroh's magic socket
|
|
||||||
|
|
||||||
packet.answers.push(pkarr::dns::ResourceRecord::new(
|
|
||||||
"@".try_into().unwrap(),
|
|
||||||
pkarr::dns::CLASS::IN,
|
|
||||||
60 * 60,
|
|
||||||
pkarr::dns::rdata::RData::SVCB(svcb),
|
|
||||||
));
|
|
||||||
|
|
||||||
let signed_packet = SignedPacket::from_packet(keypair, &packet)?;
|
|
||||||
|
|
||||||
pkarr_client.publish(&signed_packet).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,43 +0,0 @@
use axum::{
|
|
||||||
extract::DefaultBodyLimit,
|
|
||||||
routing::{delete, get, post, put},
|
|
||||||
Router,
|
|
||||||
};
|
|
||||||
use tower_cookies::CookieManagerLayer;
|
|
||||||
use tower_http::{cors::CorsLayer, trace::TraceLayer};
|
|
||||||
|
|
||||||
use crate::server::AppState;
|
|
||||||
|
|
||||||
use self::pkarr::pkarr_router;
|
|
||||||
|
|
||||||
mod auth;
|
|
||||||
mod feed;
|
|
||||||
mod pkarr;
|
|
||||||
mod public;
|
|
||||||
mod root;
|
|
||||||
|
|
||||||
fn base(state: AppState) -> Router {
|
|
||||||
Router::new()
|
|
||||||
.route("/", get(root::handler))
|
|
||||||
.route("/signup", post(auth::signup))
|
|
||||||
.route("/session", post(auth::signin))
|
|
||||||
.route("/:pubky/session", get(auth::session))
|
|
||||||
.route("/:pubky/session", delete(auth::signout))
|
|
||||||
.route("/:pubky/*path", put(public::put))
|
|
||||||
.route("/:pubky/*path", get(public::get))
|
|
||||||
.route("/:pubky/*path", delete(public::delete))
|
|
||||||
.route("/events/", get(feed::feed))
|
|
||||||
.layer(CookieManagerLayer::new())
|
|
||||||
// TODO: revisit if we enable streaming big payloads
|
|
||||||
// TODO: maybe add to a separate router (drive router?).
|
|
||||||
.layer(DefaultBodyLimit::max(16 * 1024))
|
|
||||||
.with_state(state)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_app(state: AppState) -> Router {
|
|
||||||
base(state.clone())
|
|
||||||
// TODO: Only enable this for test environments?
|
|
||||||
.nest("/pkarr", pkarr_router(state))
|
|
||||||
.layer(CorsLayer::very_permissive())
|
|
||||||
.layer(TraceLayer::new_for_http())
|
|
||||||
}
|
|
||||||
@@ -1,138 +0,0 @@
use axum::{
|
|
||||||
debug_handler,
|
|
||||||
extract::State,
|
|
||||||
http::{uri::Scheme, StatusCode, Uri},
|
|
||||||
response::IntoResponse,
|
|
||||||
};
|
|
||||||
use axum_extra::{headers::UserAgent, TypedHeader};
|
|
||||||
use bytes::Bytes;
|
|
||||||
use tower_cookies::{cookie::SameSite, Cookie, Cookies};
|
|
||||||
|
|
||||||
use pubky_common::{crypto::random_bytes, session::Session, timestamp::Timestamp};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
database::tables::{
|
|
||||||
sessions::{SessionsTable, SESSIONS_TABLE},
|
|
||||||
users::User,
|
|
||||||
},
|
|
||||||
error::{Error, Result},
|
|
||||||
extractors::Pubky,
|
|
||||||
server::AppState,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[debug_handler]
|
|
||||||
pub async fn signup(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
user_agent: Option<TypedHeader<UserAgent>>,
|
|
||||||
cookies: Cookies,
|
|
||||||
uri: Uri,
|
|
||||||
body: Bytes,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
// TODO: Verify invitation link.
|
|
||||||
// TODO: add errors in case of an already existing user.
|
|
||||||
signin(State(state), user_agent, cookies, uri, body).await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn session(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
cookies: Cookies,
|
|
||||||
pubky: Pubky,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
|
|
||||||
let rtxn = state.db.env.read_txn()?;
|
|
||||||
|
|
||||||
let sessions: SessionsTable = state
|
|
||||||
.db
|
|
||||||
.env
|
|
||||||
.open_database(&rtxn, Some(SESSIONS_TABLE))?
|
|
||||||
.expect("Session table already created");
|
|
||||||
|
|
||||||
if let Some(session) = sessions.get(&rtxn, cookie.value())? {
|
|
||||||
let session = session.to_owned();
|
|
||||||
rtxn.commit()?;
|
|
||||||
|
|
||||||
// TODO: add content-type
|
|
||||||
return Ok(session);
|
|
||||||
};
|
|
||||||
|
|
||||||
rtxn.commit()?;
|
|
||||||
};
|
|
||||||
|
|
||||||
Err(Error::with_status(StatusCode::NOT_FOUND))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn signout(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
cookies: Cookies,
|
|
||||||
pubky: Pubky,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
if let Some(cookie) = cookies.get(&pubky.public_key().to_string()) {
|
|
||||||
let mut wtxn = state.db.env.write_txn()?;
|
|
||||||
|
|
||||||
let sessions: SessionsTable = state
|
|
||||||
.db
|
|
||||||
.env
|
|
||||||
.open_database(&wtxn, Some(SESSIONS_TABLE))?
|
|
||||||
.expect("Session table already created");
|
|
||||||
|
|
||||||
let _ = sessions.delete(&mut wtxn, cookie.value());
|
|
||||||
|
|
||||||
wtxn.commit()?;
|
|
||||||
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
|
|
||||||
Err(Error::with_status(StatusCode::UNAUTHORIZED))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn signin(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
user_agent: Option<TypedHeader<UserAgent>>,
|
|
||||||
cookies: Cookies,
|
|
||||||
uri: Uri,
|
|
||||||
body: Bytes,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
let token = state.verifier.verify(&body)?;
|
|
||||||
|
|
||||||
let public_key = token.pubky();
|
|
||||||
|
|
||||||
let mut wtxn = state.db.env.write_txn()?;
|
|
||||||
|
|
||||||
let users = state.db.tables.users;
|
|
||||||
if let Some(existing) = users.get(&wtxn, public_key)? {
|
|
||||||
users.put(&mut wtxn, public_key, &existing)?;
|
|
||||||
} else {
|
|
||||||
users.put(
|
|
||||||
&mut wtxn,
|
|
||||||
public_key,
|
|
||||||
&User {
|
|
||||||
created_at: Timestamp::now().into_inner(),
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let session_secret = base32::encode(base32::Alphabet::Crockford, &random_bytes::<16>());
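// The 16 random bytes are Crockford base32 encoded; the resulting string is
// used both as the sessions table key and as the cookie value.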
|
|
||||||
|
|
||||||
let session = Session::new(&token, user_agent.map(|ua| ua.to_string())).serialize();
|
|
||||||
|
|
||||||
state
|
|
||||||
.db
|
|
||||||
.tables
|
|
||||||
.sessions
|
|
||||||
.put(&mut wtxn, &session_secret, &session)?;
|
|
||||||
|
|
||||||
let mut cookie = Cookie::new(public_key.to_string(), session_secret);
|
|
||||||
|
|
||||||
cookie.set_path("/");
|
|
||||||
if *uri.scheme().unwrap_or(&Scheme::HTTP) == Scheme::HTTPS {
|
|
||||||
cookie.set_secure(true);
|
|
||||||
cookie.set_same_site(SameSite::None);
|
|
||||||
}
|
|
||||||
cookie.set_http_only(true);
|
|
||||||
|
|
||||||
cookies.add(cookie);
|
|
||||||
|
|
||||||
wtxn.commit()?;
|
|
||||||
|
|
||||||
Ok(session)
|
|
||||||
}
|
|
||||||
@@ -1,41 +0,0 @@
use axum::{
|
|
||||||
body::Body,
|
|
||||||
extract::State,
|
|
||||||
http::{header, Response, StatusCode},
|
|
||||||
response::IntoResponse,
|
|
||||||
};
|
|
||||||
use pubky_common::timestamp::{Timestamp, TimestampError};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
error::{Error, Result},
|
|
||||||
extractors::ListQueryParams,
|
|
||||||
server::AppState,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub async fn feed(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
params: ListQueryParams,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
if let Some(ref cursor) = params.cursor {
|
|
||||||
if let Err(timestamp_error) = Timestamp::try_from(cursor.to_string()) {
|
|
||||||
let cause = match timestamp_error {
|
|
||||||
TimestampError::InvalidEncoding => {
|
|
||||||
"Cursor should be valid base32 Crockford encoding of a timestamp"
|
|
||||||
}
|
|
||||||
TimestampError::InvalidBytesLength(size) => {
|
|
||||||
&format!("Cursor should be 13 characters long, got: {size}")
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Err(Error::new(StatusCode::BAD_REQUEST, cause.into()))?
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = state.db.list_events(params.limit, params.cursor)?;
|
|
||||||
|
|
||||||
Ok(Response::builder()
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.header(header::CONTENT_TYPE, "text/plain")
|
|
||||||
.body(Body::from(result.join("\n")))
|
|
||||||
.unwrap())
|
|
||||||
}
|
|
||||||
@@ -1,58 +0,0 @@
use axum::{
|
|
||||||
body::{Body, Bytes},
|
|
||||||
extract::State,
|
|
||||||
http::StatusCode,
|
|
||||||
response::IntoResponse,
|
|
||||||
routing::{get, put},
|
|
||||||
Router,
|
|
||||||
};
|
|
||||||
use futures_util::stream::StreamExt;
|
|
||||||
|
|
||||||
use pkarr::SignedPacket;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
error::{Error, Result},
|
|
||||||
extractors::Pubky,
|
|
||||||
server::AppState,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Pkarr relay, helpful for testing.
|
|
||||||
///
|
|
||||||
/// For real production, you should use a [production ready
|
|
||||||
/// relay](https://github.com/pubky/pkarr/server).
|
|
||||||
pub fn pkarr_router(state: AppState) -> Router {
|
|
||||||
Router::new()
|
|
||||||
.route("/:pubky", put(pkarr_put))
|
|
||||||
.route("/:pubky", get(pkarr_get))
|
|
||||||
.with_state(state)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn pkarr_put(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
pubky: Pubky,
|
|
||||||
body: Body,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
let mut bytes = Vec::with_capacity(1104);
|
|
||||||
|
|
||||||
let mut stream = body.into_data_stream();
|
|
||||||
|
|
||||||
while let Some(chunk) = stream.next().await {
|
|
||||||
bytes.extend_from_slice(&chunk?)
|
|
||||||
}
|
|
||||||
|
|
||||||
let public_key = pubky.public_key().to_owned();
|
|
||||||
|
|
||||||
let signed_packet = SignedPacket::from_relay_payload(&public_key, &Bytes::from(bytes))?;
|
|
||||||
|
|
||||||
state.pkarr_client.publish(&signed_packet).await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn pkarr_get(State(state): State<AppState>, pubky: Pubky) -> Result<impl IntoResponse> {
|
|
||||||
if let Some(signed_packet) = state.pkarr_client.resolve(pubky.public_key()).await? {
|
|
||||||
return Ok(signed_packet.to_relay_payload());
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(Error::with_status(StatusCode::NOT_FOUND))
|
|
||||||
}
|
|
||||||
@@ -1,174 +0,0 @@
use axum::{
|
|
||||||
body::{Body, Bytes},
|
|
||||||
extract::State,
|
|
||||||
http::{header, Response, StatusCode},
|
|
||||||
response::IntoResponse,
|
|
||||||
};
|
|
||||||
use futures_util::stream::StreamExt;
|
|
||||||
use pkarr::PublicKey;
|
|
||||||
use tower_cookies::Cookies;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
error::{Error, Result},
|
|
||||||
extractors::{EntryPath, ListQueryParams, Pubky},
|
|
||||||
server::AppState,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub async fn put(
|
|
||||||
State(mut state): State<AppState>,
|
|
||||||
pubky: Pubky,
|
|
||||||
path: EntryPath,
|
|
||||||
cookies: Cookies,
|
|
||||||
body: Body,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
let public_key = pubky.public_key().clone();
|
|
||||||
let path = path.as_str();
|
|
||||||
|
|
||||||
verify(path)?;
|
|
||||||
authorize(&mut state, cookies, &public_key, path)?;
|
|
||||||
|
|
||||||
let mut stream = body.into_data_stream();
|
|
||||||
|
|
||||||
let (tx, rx) = flume::bounded::<Bytes>(1);
|
|
||||||
|
|
||||||
let path = path.to_string();
|
|
||||||
|
|
||||||
// TODO: refactor Database to clean up this scope.
|
|
||||||
let done = tokio::task::spawn_blocking(move || -> Result<()> {
|
|
||||||
// TODO: this is a blocking operation, which is ok for small
|
|
||||||
// payloads (we have 16 kb limit for now) but later we need
|
|
||||||
// to stream this to filesystem, and keep track of any failed
|
|
||||||
// writes to GC these files later.
|
|
||||||
|
|
||||||
state.db.put_entry(&public_key, &path, rx)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
while let Some(next) = stream.next().await {
|
|
||||||
let chunk = next?;
|
|
||||||
|
|
||||||
tx.send(chunk)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
drop(tx);
|
|
||||||
done.await.expect("join error")?;
|
|
||||||
|
|
||||||
// TODO: return relevant headers, like Etag?
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get(
|
|
||||||
State(state): State<AppState>,
|
|
||||||
pubky: Pubky,
|
|
||||||
path: EntryPath,
|
|
||||||
params: ListQueryParams,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
verify(path.as_str())?;
|
|
||||||
let public_key = pubky.public_key();
|
|
||||||
|
|
||||||
let path = path.as_str();
|
|
||||||
|
|
||||||
if path.ends_with('/') {
|
|
||||||
let txn = state.db.env.read_txn()?;
|
|
||||||
|
|
||||||
let path = format!("{public_key}/{path}");
|
|
||||||
|
|
||||||
if !state.db.contains_directory(&txn, &path)? {
|
|
||||||
return Err(Error::new(
|
|
||||||
StatusCode::NOT_FOUND,
|
|
||||||
"Directory Not Found".into(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle listing
|
|
||||||
let vec = state.db.list(
|
|
||||||
&txn,
|
|
||||||
&path,
|
|
||||||
params.reverse,
|
|
||||||
params.limit,
|
|
||||||
params.cursor,
|
|
||||||
params.shallow,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
return Ok(Response::builder()
|
|
||||||
.status(StatusCode::OK)
|
|
||||||
.header(header::CONTENT_TYPE, "text/plain")
|
|
||||||
.body(Body::from(vec.join("\n")))
|
|
||||||
.unwrap());
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Enable streaming
|
|
||||||
|
|
||||||
match state.db.get_blob(public_key, path) {
|
|
||||||
Err(error) => Err(error)?,
|
|
||||||
Ok(Some(bytes)) => Ok(Response::builder().body(Body::from(bytes)).unwrap()),
|
|
||||||
Ok(None) => Err(Error::new(StatusCode::NOT_FOUND, "File Not Found".into())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn delete(
|
|
||||||
State(mut state): State<AppState>,
|
|
||||||
pubky: Pubky,
|
|
||||||
path: EntryPath,
|
|
||||||
cookies: Cookies,
|
|
||||||
) -> Result<impl IntoResponse> {
|
|
||||||
let public_key = pubky.public_key().clone();
|
|
||||||
let path = path.as_str();
|
|
||||||
|
|
||||||
authorize(&mut state, cookies, &public_key, path)?;
|
|
||||||
verify(path)?;
|
|
||||||
|
|
||||||
let deleted = state.db.delete_entry(&public_key, path)?;
|
|
||||||
|
|
||||||
if !deleted {
|
|
||||||
// TODO: if the path ends with `/` return a `CONFLICT` error?
|
|
||||||
return Err(Error::with_status(StatusCode::NOT_FOUND));
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: return relevant headers, like Etag?
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Authorize write (PUT or DELETE) for Public paths.
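///
/// The request is allowed only if the cookie maps to a session for the same
/// public key and that session holds a capability whose scope is a prefix of
/// the path and whose actions include `Write`.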
|
|
||||||
fn authorize(
|
|
||||||
state: &mut AppState,
|
|
||||||
cookies: Cookies,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
path: &str,
|
|
||||||
) -> Result<()> {
|
|
||||||
// TODO: can we move this logic to the extractor or a layer
|
|
||||||
// to perform this validation?
|
|
||||||
let session = state
|
|
||||||
.db
|
|
||||||
.get_session(cookies, public_key)?
|
|
||||||
.ok_or(Error::with_status(StatusCode::UNAUTHORIZED))?;
|
|
||||||
|
|
||||||
if session.pubky() == public_key
|
|
||||||
&& session.capabilities().iter().any(|cap| {
|
|
||||||
path.starts_with(&cap.scope[1..])
|
|
||||||
&& cap
|
|
||||||
.actions
|
|
||||||
.contains(&pubky_common::capabilities::Action::Write)
|
|
||||||
})
|
|
||||||
{
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(Error::with_status(StatusCode::FORBIDDEN))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn verify(path: &str) -> Result<()> {
|
|
||||||
if !path.starts_with("pub/") {
|
|
||||||
return Err(Error::new(
|
|
||||||
StatusCode::FORBIDDEN,
|
|
||||||
"Writing to directories other than '/pub/' is forbidden".into(),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: should we forbid paths ending with `/`?
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,5 +0,0 @@
use axum::response::IntoResponse;

pub async fn handler() -> Result<impl IntoResponse, String> {
    Ok("This is a Pubky homeserver.".to_string())
}
@@ -1,169 +0,0 @@
use std::{future::IntoFuture, net::SocketAddr};
|
|
||||||
|
|
||||||
use anyhow::{Error, Result};
|
|
||||||
use pubky_common::auth::AuthVerifier;
|
|
||||||
use tokio::{net::TcpListener, signal, task::JoinSet};
|
|
||||||
use tracing::{debug, info, warn};
|
|
||||||
|
|
||||||
use pkarr::{
|
|
||||||
mainline::dht::{DhtSettings, Testnet},
|
|
||||||
PkarrClient, PkarrClientAsync, PublicKey, Settings,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{config::Config, database::DB, pkarr::publish_server_packet};
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Homeserver {
|
|
||||||
state: AppState,
|
|
||||||
tasks: JoinSet<std::io::Result<()>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub(crate) struct AppState {
|
|
||||||
pub(crate) verifier: AuthVerifier,
|
|
||||||
pub(crate) db: DB,
|
|
||||||
pub(crate) pkarr_client: PkarrClientAsync,
|
|
||||||
pub(crate) config: Config,
|
|
||||||
pub(crate) port: u16,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Homeserver {
|
|
||||||
pub async fn start(config: Config) -> Result<Self> {
|
|
||||||
debug!(?config);
|
|
||||||
|
|
||||||
let db = DB::open(config.clone())?;
|
|
||||||
|
|
||||||
let pkarr_client = PkarrClient::new(Settings {
|
|
||||||
dht: DhtSettings {
|
|
||||||
bootstrap: config.bootstsrap(),
|
|
||||||
request_timeout: config.dht_request_timeout(),
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
..Default::default()
|
|
||||||
})?
|
|
||||||
.as_async();
|
|
||||||
|
|
||||||
let mut tasks = JoinSet::new();
|
|
||||||
|
|
||||||
let listener = TcpListener::bind(SocketAddr::from(([0, 0, 0, 0], config.port()))).await?;
|
|
||||||
|
|
||||||
let port = listener.local_addr()?.port();
|
|
||||||
|
|
||||||
let state = AppState {
|
|
||||||
verifier: AuthVerifier::default(),
|
|
||||||
db,
|
|
||||||
pkarr_client,
|
|
||||||
config: config.clone(),
|
|
||||||
port,
|
|
||||||
};
|
|
||||||
|
|
||||||
let app = crate::routes::create_app(state.clone());
|
|
||||||
|
|
||||||
// Spawn http server task
|
|
||||||
tasks.spawn(
|
|
||||||
axum::serve(
|
|
||||||
listener,
|
|
||||||
app.into_make_service_with_connect_info::<SocketAddr>(),
|
|
||||||
)
|
|
||||||
.with_graceful_shutdown(shutdown_signal())
|
|
||||||
.into_future(),
|
|
||||||
);
|
|
||||||
|
|
||||||
info!("Homeserver listening on http://localhost:{port}");
|
|
||||||
|
|
||||||
publish_server_packet(
|
|
||||||
&state.pkarr_client,
|
|
||||||
config.keypair(),
|
|
||||||
&state
|
|
||||||
.config
|
|
||||||
.domain()
|
|
||||||
.clone()
|
|
||||||
.unwrap_or("localhost".to_string()),
|
|
||||||
port,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"Homeserver listening on pubky://{}",
|
|
||||||
config.keypair().public_key()
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(Self { tasks, state })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Test version of [Homeserver::start], using mainline Testnet, and a temporary storage.
|
|
||||||
pub async fn start_test(testnet: &Testnet) -> Result<Self> {
|
|
||||||
info!("Running testnet..");
|
|
||||||
|
|
||||||
Homeserver::start(Config::test(testnet)).await
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Getters ===
|
|
||||||
|
|
||||||
pub fn port(&self) -> u16 {
|
|
||||||
self.state.port
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn public_key(&self) -> PublicKey {
|
|
||||||
self.state.config.keypair().public_key()
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Public Methods ===
|
|
||||||
|
|
||||||
/// Shutdown the server and wait for all tasks to complete.
|
|
||||||
pub async fn shutdown(mut self) -> Result<()> {
|
|
||||||
self.tasks.abort_all();
|
|
||||||
self.run_until_done().await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Wait for all tasks to complete.
|
|
||||||
///
|
|
||||||
/// Runs forever unless tasks fail.
|
|
||||||
pub async fn run_until_done(mut self) -> Result<()> {
|
|
||||||
let mut final_res: Result<()> = Ok(());
|
|
||||||
while let Some(res) = self.tasks.join_next().await {
|
|
||||||
match res {
|
|
||||||
Ok(Ok(())) => {}
|
|
||||||
Err(err) if err.is_cancelled() => {}
|
|
||||||
Ok(Err(err)) => {
|
|
||||||
warn!(?err, "task failed");
|
|
||||||
final_res = Err(Error::from(err));
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
warn!(?err, "task panicked");
|
|
||||||
final_res = Err(err.into());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
final_res
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn shutdown_signal() {
|
|
||||||
let ctrl_c = async {
|
|
||||||
signal::ctrl_c()
|
|
||||||
.await
|
|
||||||
.expect("failed to install Ctrl+C handler");
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(unix)]
|
|
||||||
let terminate = async {
|
|
||||||
signal::unix::signal(signal::unix::SignalKind::terminate())
|
|
||||||
.expect("failed to install signal handler")
|
|
||||||
.recv()
|
|
||||||
.await;
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(not(unix))]
|
|
||||||
let terminate = std::future::pending::<()>();
|
|
||||||
|
|
||||||
fn graceful_shutdown() {
|
|
||||||
info!("Gracefully Shutting down..");
|
|
||||||
}
|
|
||||||
|
|
||||||
tokio::select! {
|
|
||||||
_ = ctrl_c => graceful_shutdown(),
|
|
||||||
_ = terminate => graceful_shutdown(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,44 +0,0 @@
[package]
|
|
||||||
name = "pubky"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
description = "Pubky client"
|
|
||||||
license = "MIT"
|
|
||||||
repository = "https://github.com/pubky/pubky"
|
|
||||||
keywords = ["web", "dht", "dns", "decentralized", "identity"]
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
crate-type = ["cdylib", "rlib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
thiserror = "1.0.62"
|
|
||||||
wasm-bindgen = "0.2.92"
|
|
||||||
url = "2.5.2"
|
|
||||||
bytes = "^1.7.1"
|
|
||||||
base64 = "0.22.1"
|
|
||||||
|
|
||||||
pubky-common = { version = "0.1.0", path = "../pubky-common" }
|
|
||||||
pkarr = { workspace = true, features = ["async"] }
|
|
||||||
|
|
||||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
|
||||||
reqwest = { version = "0.12.5", features = ["cookies", "rustls-tls"], default-features = false }
|
|
||||||
tokio = { version = "1.37.0", features = ["full"] }
|
|
||||||
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
|
||||||
reqwest = { version = "0.12.5", default-features = false }
|
|
||||||
|
|
||||||
js-sys = "0.3.69"
|
|
||||||
wasm-bindgen = "0.2.92"
|
|
||||||
wasm-bindgen-futures = "0.4.42"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
pubky_homeserver = { path = "../pubky-homeserver" }
|
|
||||||
tokio = "1.37.0"
|
|
||||||
|
|
||||||
[features]
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
|
||||||
all-features = true
|
|
||||||
|
|
||||||
[package.metadata.wasm-pack.profile.release]
|
|
||||||
wasm-opt = ['-g', '-O']
|
|
||||||
rust/pubky/pubky/pkg/.gitignore (vendored)
@@ -1,6 +0,0 @@
index.cjs
browser.js
coverage
node_modules
package-lock.json
pubky*
@@ -1,21 +0,0 @@
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2023
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
@@ -1,266 +0,0 @@
# Pubky
|
|
||||||
|
|
||||||
JavaScript implementation of [Pubky](https://github.com/pubky/pubky).
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
- [Install](#install)
|
|
||||||
- [Getting Started](#getting-started)
|
|
||||||
- [API](#api)
|
|
||||||
- [Test and Development](#test-and-development)
|
|
||||||
|
|
||||||
## Install
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm install @synonymdev/pubky
|
|
||||||
```
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
|
|
||||||
For Node.js, you need Node v20 or later.
|
|
||||||
|
|
||||||
## Getting started
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { PubkyClient, Keypair, PublicKey } from '../index.js'
|
|
||||||
|
|
||||||
// Initialize PubkyClient with Pkarr relay(s).
|
|
||||||
let client = new PubkyClient();
|
|
||||||
|
|
||||||
// Generate a keypair
|
|
||||||
let keypair = Keypair.random();
|
|
||||||
|
|
||||||
// Create a new account
|
|
||||||
let homeserver = PublicKey.from("8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo");
|
|
||||||
|
|
||||||
await client.signup(keypair, homeserver)
|
|
||||||
|
|
||||||
const publicKey = keypair.publicKey();
|
|
||||||
|
|
||||||
// Pubky URL
|
|
||||||
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
|
|
||||||
|
|
||||||
// Verify that you are signed in.
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
|
|
||||||
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
|
|
||||||
|
|
||||||
// PUT public data, by authorized client
|
|
||||||
await client.put(url, body);
|
|
||||||
|
|
||||||
// GET public data without signup or signin
|
|
||||||
{
|
|
||||||
const client = new PubkyClient();
|
|
||||||
|
|
||||||
let response = await client.get(url);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete public data, by authorized client
|
|
||||||
await client.delete(url);
|
|
||||||
```
|
|
||||||
|
|
||||||
## API
|
|
||||||
|
|
||||||
### PubkyClient
|
|
||||||
|
|
||||||
#### constructor
|
|
||||||
```js
|
|
||||||
let client = new PubkyClient()
|
|
||||||
```
|
|
||||||
|
|
||||||
#### signup
|
|
||||||
```js
|
|
||||||
await client.signup(keypair, homeserver)
|
|
||||||
```
|
|
||||||
- keypair: An instance of [Keypair](#keypair).
|
|
||||||
- homeserver: An instance of [PublicKey](#publickey) representing the homeserver.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
- session: An instance of [Session](#session).
|
|
||||||
|
|
||||||
#### signin
|
|
||||||
```js
|
|
||||||
let session = await client.signin(keypair)
|
|
||||||
```
|
|
||||||
- keypair: An instance of [Keypair](#keypair).
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
- An instance of [Session](#session).
|
|
||||||
|
|
||||||
#### signout
|
|
||||||
```js
|
|
||||||
await client.signout(publicKey)
|
|
||||||
```
|
|
||||||
- publicKey: An instance of [PublicKey](#publickey).
|
|
||||||
|
|
||||||
#### authRequest
|
|
||||||
```js
|
|
||||||
let [pubkyauthUrl, sessionPromise] = client.authRequest(relay, capabilities);
|
|
||||||
|
|
||||||
showQr(pubkyauthUrl);
|
|
||||||
|
|
||||||
let pubky = await sessionPromise;
|
|
||||||
```
|
|
||||||
|
|
||||||
Sign in to a user's Homeserver without access to their [Keypair](#keypair), or even their [PublicKey](#publickey);
instead, request permissions (showing the user the pubkyauthUrl) and await a Session once the user consents to that request.
|
|
||||||
|
|
||||||
- relay: A URL to an [HTTP relay](https://httprelay.io/features/link/) endpoint.
|
|
||||||
- capabilities: A list of capabilities required for the app for example `/pub/pubky.app/:rw,/pub/example.com/:r`.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
- pubkyauthUrl: A url to show to the user to scan or paste into an Authenticator app holding the user [Keypair](#keypair)
|
|
||||||
- sessionPromise: A promise that resolves into a [PublicKey](#publickey) on success, which you can use in `client.session(pubky)` to resolve more information about the Session.
|
|
||||||
|
|
||||||
#### sendAuthToken
|
|
||||||
```js
|
|
||||||
await client.sendAuthToken(keypair, pubkyauthUrl);
|
|
||||||
```
|
|
||||||
Consent to authentication or authorization according to the capabilities required in the `pubkyauthUrl`, then sign and send an auth token to the requester.
|
|
||||||
|
|
||||||
- keypair: An instance of [KeyPair](#keypair)
|
|
||||||
- pubkyauthUrl: A string `pubkyauth://` url
|
|
||||||
|
|
||||||
#### session {#session-method}
|
|
||||||
```js
|
|
||||||
let session = await client.session(publicKey)
|
|
||||||
```
|
|
||||||
- publicKey: An instance of [PublicKey](#publickey).
|
|
||||||
- Returns: A [Session](#session) object if signed in, or undefined if not.
|
|
||||||
|
|
||||||
#### put
|
|
||||||
```js
|
|
||||||
let response = await client.put(url, body);
|
|
||||||
```
|
|
||||||
- url: A string representing the Pubky URL.
|
|
||||||
- body: A Buffer containing the data to be stored.
|
|
||||||
|
|
||||||
### get
|
|
||||||
```js
|
|
||||||
let response = await client.get(url)
|
|
||||||
```
|
|
||||||
- url: A string representing the Pubky URL.
|
|
||||||
- Returns: A Uint8Array object containing the requested data, or `undefined` if `NOT_FOUND`.
|
|
||||||
|
|
||||||
### delete
|
|
||||||
|
|
||||||
```js
|
|
||||||
let response = await client.delete(url);
|
|
||||||
```
|
|
||||||
- url: A string representing the Pubky URL.
|
|
||||||
|
|
||||||
### list
|
|
||||||
```js
|
|
||||||
let response = await client.list(url, cursor, reverse, limit)
|
|
||||||
```
|
|
||||||
- url: A string representing the Pubky URL. The path in that url is the prefix that you want to list files within.
|
|
||||||
- cursor: Usually the last URL from previous calls. List urls after/before (depending on `reverse`) the cursor.
|
|
||||||
- reverse: Whether or not to return URLs in reverse order.
|
|
||||||
- limit: Number of urls to return.
|
|
||||||
- Returns: A list of URLs of the files in the `url` you passed.
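
A minimal pagination sketch using only the parameters documented above (the path and page size are placeholders, and passing `null` for an absent cursor is an assumption):

```js
const pageSize = 100;
let url = `pubky://${publicKey.z32()}/pub/example.com/`;

// First page, no cursor.
let page = await client.list(url, null, false, pageSize);

while (page.length === pageSize) {
  // The last URL of the previous page becomes the cursor for the next one.
  page = await client.list(url, page[page.length - 1], false, pageSize);
}
```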
|
|
||||||
|
|
||||||
### Keypair
|
|
||||||
|
|
||||||
#### random
|
|
||||||
```js
|
|
||||||
let keypair = Keypair.random()
|
|
||||||
```
|
|
||||||
- Returns: A new random Keypair.
|
|
||||||
|
|
||||||
#### fromSecretKey
|
|
||||||
```js
|
|
||||||
let keypair = Keypair.fromSecretKey(secretKey)
|
|
||||||
```
|
|
||||||
- secretKey: A 32 bytes Uint8array.
|
|
||||||
- Returns: A new Keypair.
|
|
||||||
|
|
||||||
|
|
||||||
#### publicKey {#publickey-method}
|
|
||||||
```js
|
|
||||||
let publicKey = keypair.publicKey()
|
|
||||||
```
|
|
||||||
- Returns: The [PublicKey](#publickey) associated with the Keypair.
|
|
||||||
|
|
||||||
#### secretKey
|
|
||||||
```js
|
|
||||||
let secretKey = keypair.secretKey()
|
|
||||||
```
|
|
||||||
- Returns: The Uint8array secret key associated with the Keypair.
|
|
||||||
|
|
||||||
### PublicKey
|
|
||||||
|
|
||||||
#### from
|
|
||||||
|
|
||||||
```js
|
|
||||||
let publicKey = PublicKey.from(string);
|
|
||||||
```
|
|
||||||
- string: A string representing the public key.
|
|
||||||
- Returns: A new PublicKey instance.
|
|
||||||
|
|
||||||
#### z32
|
|
||||||
```js
|
|
||||||
let pubky = publicKey.z32();
|
|
||||||
```
|
|
||||||
Returns: The z-base-32 encoded string representation of the PublicKey.
|
|
||||||
|
|
||||||
### Session
|
|
||||||
|
|
||||||
#### pubky
|
|
||||||
```js
|
|
||||||
let pubky = session.pubky();
|
|
||||||
```
|
|
||||||
Returns an instance of [PublicKey](#publickey)
|
|
||||||
|
|
||||||
#### capabilities
|
|
||||||
```js
|
|
||||||
let capabilities = session.capabilities();
|
|
||||||
```
|
|
||||||
Returns an array of capabilities, for example `["/pub/pubky.app/:rw"]`
|
|
||||||
|
|
||||||
### Helper functions
|
|
||||||
|
|
||||||
#### createRecoveryFile
|
|
||||||
```js
|
|
||||||
let recoveryFile = createRecoveryFile(keypair, passphrase)
|
|
||||||
```
|
|
||||||
- keypair: An instance of [Keypair](#keypair).
|
|
||||||
- passphrase: A utf-8 string [passphrase](https://www.useapassphrase.com/).
|
|
||||||
- Returns: A recovery file with a spec line and an encrypted secret key.
|
|
||||||
|
|
||||||
#### decryptRecoveryFile
|
|
||||||
```js
|
|
||||||
let keypair = decryptRecoveryfile(recoveryFile, passphrase)
|
|
||||||
```
|
|
||||||
- recoveryFile: An instance of Uint8Array containing the recovery file blob.
|
|
||||||
- passphrase: A utf-8 string [passphrase](https://www.useapassphrase.com/).
|
|
||||||
- Returns: An instance of [Keypair](#keypair).
|
|
||||||
|
|
||||||
## Test and Development
|
|
||||||
|
|
||||||
For test and development, you can run a local homeserver in a test network.
|
|
||||||
|
|
||||||
If you don't have Cargo Installed, start by installing it:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
curl https://sh.rustup.rs -sSf | sh
|
|
||||||
```
|
|
||||||
|
|
||||||
Clone the Pubky repository:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/pubky/pubky
|
|
||||||
cd pubky/pkg
|
|
||||||
```
|
|
||||||
|
|
||||||
Run the local testnet server
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run testnet
|
|
||||||
```
|
|
||||||
|
|
||||||
Use the logged addresses as inputs to `PubkyClient`
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { PubkyClient } from '../index.js'
|
|
||||||
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
```
|
|
||||||
@@ -1,41 +0,0 @@
{
|
|
||||||
"name": "@synonymdev/pubky",
|
|
||||||
"type": "module",
|
|
||||||
"description": "Pubky client",
|
|
||||||
"version": "0.1.16",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/pubky/pubky"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"testnet": "cargo run -p pubky_homeserver -- --testnet",
|
|
||||||
"test": "npm run test-nodejs && npm run test-browser",
|
|
||||||
"test-nodejs": "tape test/*.js -cov",
|
|
||||||
"test-browser": "browserify test/*.js -p esmify | npx tape-run",
|
|
||||||
"build": "cargo run --bin bundle_pubky_npm",
|
|
||||||
"prepublishOnly": "npm run build && npm run test"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"index.cjs",
|
|
||||||
"browser.js",
|
|
||||||
"pubky.d.ts",
|
|
||||||
"pubky_bg.wasm"
|
|
||||||
],
|
|
||||||
"main": "index.cjs",
|
|
||||||
"browser": "browser.js",
|
|
||||||
"types": "pubky.d.ts",
|
|
||||||
"keywords": [
|
|
||||||
"web",
|
|
||||||
"dht",
|
|
||||||
"dns",
|
|
||||||
"decentralized",
|
|
||||||
"identity"
|
|
||||||
],
|
|
||||||
"devDependencies": {
|
|
||||||
"browser-resolve": "^2.0.0",
|
|
||||||
"esmify": "^2.1.1",
|
|
||||||
"tape": "^5.8.1",
|
|
||||||
"tape-run": "^11.0.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,65 +0,0 @@
import test from 'tape'
|
|
||||||
|
|
||||||
import { PubkyClient, Keypair, PublicKey } from '../index.cjs'
|
|
||||||
|
|
||||||
const Homeserver = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo')
|
|
||||||
|
|
||||||
test('auth', async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random()
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver)
|
|
||||||
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
t.ok(session, "signup")
|
|
||||||
|
|
||||||
{
|
|
||||||
await client.signout(publicKey)
|
|
||||||
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
t.notOk(session, "signout")
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
await client.signin(keypair)
|
|
||||||
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
t.ok(session, "signin")
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
test("3rd party signin", async (t) => {
|
|
||||||
let keypair = Keypair.random();
|
|
||||||
let pubky = keypair.publicKey().z32();
|
|
||||||
|
|
||||||
// Third party app side
|
|
||||||
let capabilities = "/pub/pubky.app/:rw,/pub/foo.bar/file:r";
|
|
||||||
let client = PubkyClient.testnet();
|
|
||||||
let [pubkyauth_url, pubkyauthResponse] = client
|
|
||||||
.authRequest("https://demo.httprelay.io/link", capabilities);
|
|
||||||
|
|
||||||
if (globalThis.document) {
|
|
||||||
// Skip `sendAuthToken` in browser
|
|
||||||
// TODO: figure out why does it fail in browser unit tests
|
|
||||||
// but not in real browser (check pubky-auth-widget.js commented part)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Authenticator side
|
|
||||||
{
|
|
||||||
let client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver);
|
|
||||||
|
|
||||||
await client.sendAuthToken(keypair, pubkyauth_url)
|
|
||||||
}
|
|
||||||
|
|
||||||
let authedPubky = await pubkyauthResponse;
|
|
||||||
|
|
||||||
t.is(authedPubky.z32(), pubky);
|
|
||||||
|
|
||||||
let session = await client.session(authedPubky);
|
|
||||||
t.deepEqual(session.capabilities(), capabilities.split(','))
|
|
||||||
})
|
|
||||||
@@ -1,21 +0,0 @@
import test from 'tape'
|
|
||||||
|
|
||||||
import { Keypair } from '../index.cjs'
|
|
||||||
|
|
||||||
test('generate keys from a seed', async (t) => {
|
|
||||||
const secretkey = Buffer.from('5aa93b299a343aa2691739771f2b5b85e740ca14c685793d67870f88fa89dc51', 'hex')
|
|
||||||
|
|
||||||
const keypair = Keypair.fromSecretKey(secretkey)
|
|
||||||
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
|
|
||||||
t.is(publicKey.z32(), 'gcumbhd7sqit6nn457jxmrwqx9pyymqwamnarekgo3xppqo6a19o')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('fromSecretKey error', async (t) => {
|
|
||||||
const secretkey = Buffer.from('5aa93b299a343aa2691739771f2b5b', 'hex')
|
|
||||||
|
|
||||||
|
|
||||||
t.throws(() => Keypair.fromSecretKey(null), /Expected secret_key to be an instance of Uint8Array/)
|
|
||||||
t.throws(() => Keypair.fromSecretKey(secretkey), /Expected secret_key to be 32 bytes, got 15/)
|
|
||||||
})
|
|
||||||
@@ -1,351 +0,0 @@
import test from 'tape'
|
|
||||||
|
|
||||||
import { PubkyClient, Keypair, PublicKey } from '../index.cjs'
|
|
||||||
|
|
||||||
const Homeserver = PublicKey.from('8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo');
|
|
||||||
|
|
||||||
test('public: put/get', async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random();
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver);
|
|
||||||
|
|
||||||
const publicKey = keypair.publicKey();
|
|
||||||
|
|
||||||
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
|
|
||||||
|
|
||||||
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
|
|
||||||
|
|
||||||
// PUT public data, by authorized client
|
|
||||||
await client.put(url, body);
|
|
||||||
|
|
||||||
const otherClient = PubkyClient.testnet();
|
|
||||||
|
|
||||||
// GET public data without signup or signin
|
|
||||||
{
|
|
||||||
let response = await otherClient.get(url);
|
|
||||||
|
|
||||||
t.ok(Buffer.from(response).equals(body))
|
|
||||||
}
|
|
||||||
|
|
||||||
// DELETE public data, by authorized client
|
|
||||||
await client.delete(url);
|
|
||||||
|
|
||||||
|
|
||||||
// GET public data without signup or signin
|
|
||||||
{
|
|
||||||
let response = await otherClient.get(url);
|
|
||||||
|
|
||||||
t.notOk(response)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
test("not found", async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
|
|
||||||
const keypair = Keypair.random();
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver);
|
|
||||||
|
|
||||||
const publicKey = keypair.publicKey();
|
|
||||||
|
|
||||||
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
|
|
||||||
|
|
||||||
let result = await client.get(url).catch(e => e);
|
|
||||||
|
|
||||||
t.notOk(result);
|
|
||||||
})
|
|
||||||
|
|
||||||
test("unauthorized", async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random()
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver)
|
|
||||||
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
t.ok(session, "signup")
|
|
||||||
|
|
||||||
await client.signout(publicKey)
|
|
||||||
|
|
||||||
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
|
|
||||||
|
|
||||||
let url = `pubky://${publicKey.z32()}/pub/example.com/arbitrary`;
|
|
||||||
|
|
||||||
// PUT public data, by authorized client
|
|
||||||
let result = await client.put(url, body).catch(e => e);
|
|
||||||
|
|
||||||
t.ok(result instanceof Error);
|
|
||||||
t.is(
|
|
||||||
result.message,
|
|
||||||
`HTTP status client error (401 Unauthorized) for url (http://localhost:15411/${publicKey.z32()}/pub/example.com/arbitrary)`
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
test("forbidden", async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random()
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver)
|
|
||||||
|
|
||||||
const session = await client.session(publicKey)
|
|
||||||
t.ok(session, "signup")
|
|
||||||
|
|
||||||
const body = Buffer.from(JSON.stringify({ foo: 'bar' }))
|
|
||||||
|
|
||||||
let url = `pubky://${publicKey.z32()}/priv/example.com/arbitrary`;
|
|
||||||
|
|
||||||
// PUT public data, by authorized client
|
|
||||||
let result = await client.put(url, body).catch(e => e);
|
|
||||||
|
|
||||||
t.ok(result instanceof Error);
|
|
||||||
t.is(
|
|
||||||
result.message,
|
|
||||||
`HTTP status client error (403 Forbidden) for url (http://localhost:15411/${publicKey.z32()}/priv/example.com/arbitrary)`
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
test("list", async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random()
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
const pubky = publicKey.z32()
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
let urls = [
|
|
||||||
`pubky://${pubky}/pub/a.wrong/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.wrong/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/d.txt`,
|
|
||||||
`pubky://${pubky}/pub/z.wrong/a.txt`,
|
|
||||||
]
|
|
||||||
|
|
||||||
for (let url of urls) {
|
|
||||||
await client.put(url, Buffer.from(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = `pubky://${pubky}/pub/example.com/`;
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/d.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"normal list with no limit or cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, null, 2);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"normal list with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, "a.txt", null, 2);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"normal list with limit and a suffix cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, `pubky://${pubky}/pub/example.com/a.txt`, null, 2);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"normal list with limit and a full url cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/d.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/a.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"reverse list with no limit or cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, true, 2);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/d.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"reverse list with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, "d.txt", true, 2);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
|
|
||||||
],
|
|
||||||
"reverse list with limit and a suffix cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
test('list shallow', async (t) => {
|
|
||||||
const client = PubkyClient.testnet();
|
|
||||||
|
|
||||||
const keypair = Keypair.random()
|
|
||||||
const publicKey = keypair.publicKey()
|
|
||||||
const pubky = publicKey.z32()
|
|
||||||
|
|
||||||
await client.signup(keypair, Homeserver)
|
|
||||||
|
|
||||||
let urls = [
|
|
||||||
`pubky://${pubky}/pub/a.com/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/a.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/b.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/c.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.com/d.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.con/d.txt`,
|
|
||||||
`pubky://${pubky}/pub/example.con`,
|
|
||||||
`pubky://${pubky}/pub/file`,
|
|
||||||
`pubky://${pubky}/pub/file2`,
|
|
||||||
`pubky://${pubky}/pub/z.com/a.txt`,
|
|
||||||
]
|
|
||||||
|
|
||||||
for (let url of urls) {
|
|
||||||
await client.put(url, Buffer.from(""));
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = `pubky://${pubky}/pub/`;
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, false, null, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/a.com/`,
|
|
||||||
`pubky://${pubky}/pub/example.com/`,
|
|
||||||
`pubky://${pubky}/pub/example.con`,
|
|
||||||
`pubky://${pubky}/pub/example.con/`,
|
|
||||||
`pubky://${pubky}/pub/file`,
|
|
||||||
`pubky://${pubky}/pub/file2`,
|
|
||||||
`pubky://${pubky}/pub/z.com/`,
|
|
||||||
],
|
|
||||||
"normal list shallow"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, false, 3, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/a.com/`,
|
|
||||||
`pubky://${pubky}/pub/example.com/`,
|
|
||||||
`pubky://${pubky}/pub/example.con`,
|
|
||||||
],
|
|
||||||
"normal list shallow with limit"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, `example.com/`, false, null, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/example.con`,
|
|
||||||
`pubky://${pubky}/pub/example.con/`,
|
|
||||||
`pubky://${pubky}/pub/file`,
|
|
||||||
`pubky://${pubky}/pub/file2`,
|
|
||||||
`pubky://${pubky}/pub/z.com/`,
|
|
||||||
],
|
|
||||||
"normal list shallow with cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, true, null, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/z.com/`,
|
|
||||||
`pubky://${pubky}/pub/file2`,
|
|
||||||
`pubky://${pubky}/pub/file`,
|
|
||||||
`pubky://${pubky}/pub/example.con/`,
|
|
||||||
`pubky://${pubky}/pub/example.con`,
|
|
||||||
`pubky://${pubky}/pub/example.com/`,
|
|
||||||
`pubky://${pubky}/pub/a.com/`,
|
|
||||||
],
|
|
||||||
"normal list shallow"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = await client.list(url, null, true, 3, true);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
list,
|
|
||||||
[
|
|
||||||
`pubky://${pubky}/pub/z.com/`,
|
|
||||||
`pubky://${pubky}/pub/file2`,
|
|
||||||
`pubky://${pubky}/pub/file`,
|
|
||||||
],
|
|
||||||
"normal list shallow with limit"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
@@ -1,19 +0,0 @@
import test from 'tape'

import { Keypair, createRecoveryFile, decryptRecoveryFile } from '../index.cjs'

test('recovery', async (t) => {
  const keypair = Keypair.random();

  const recoveryFile = createRecoveryFile(keypair, 'very secure password');

  t.is(recoveryFile.length, 91)
  t.deepEqual(
    Array.from(recoveryFile.slice(0, 19)),
    [112, 117, 98, 107, 121, 46, 111, 114, 103, 47, 114, 101, 99, 111, 118, 101, 114, 121, 10]
  )

  const recovered = decryptRecoveryFile(recoveryFile, 'very secure password')

  t.is(recovered.publicKey().z32(), keypair.publicKey().z32())
})
@@ -1,65 +0,0 @@
use std::env;
use std::io;
use std::process::{Command, ExitStatus};

// If the process hangs, try `cargo clean` to remove all locks.

fn main() {
    println!("Building wasm for pubky...");

    build_wasm("nodejs").unwrap();
    patch().unwrap();
}

fn build_wasm(target: &str) -> io::Result<ExitStatus> {
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not set");

    let output = Command::new("wasm-pack")
        .args([
            "build",
            &manifest_dir,
            "--release",
            "--target",
            target,
            "--out-dir",
            &format!("pkg/{}", target),
        ])
        .output()?;

    println!(
        "wasm-pack {target} output: {}",
        String::from_utf8_lossy(&output.stdout)
    );

    if !output.status.success() {
        eprintln!(
            "wasm-pack failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }

    Ok(output.status)
}

fn patch() -> io::Result<ExitStatus> {
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not set");

    println!("{manifest_dir}/src/bin/patch.mjs");
    let output = Command::new("node")
        .args([format!("{manifest_dir}/src/bin/patch.mjs")])
        .output()?;

    println!(
        "patch.mjs output: {}",
        String::from_utf8_lossy(&output.stdout)
    );

    if !output.status.success() {
        eprintln!(
            "patch.mjs failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }

    Ok(output.status)
}
@@ -1,66 +0,0 @@
// This script is used to generate isomorphic code for web and nodejs
//
// Based on hacks from [this issue](https://github.com/rustwasm/wasm-pack/issues/1334)

import { readFile, writeFile, rename } from "node:fs/promises";
import { fileURLToPath } from 'node:url';
import path, { dirname } from 'node:path';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const cargoTomlContent = await readFile(path.join(__dirname, "../../Cargo.toml"), "utf8");
const cargoPackageName = /\[package\]\nname = "(.*?)"/.exec(cargoTomlContent)[1]
const name = cargoPackageName.replace(/-/g, '_')

const content = await readFile(path.join(__dirname, `../../pkg/nodejs/${name}.js`), "utf8");

const patched = content
  // use global TextDecoder TextEncoder
  .replace("require(`util`)", "globalThis")
  // attach to `imports` instead of module.exports
  .replace("= module.exports", "= imports")
  // Export classes
  .replace(/\nclass (.*?) \{/g, "\n export class $1 {")
  // Export functions
  .replace(/\nmodule.exports.(.*?) = function/g, "\nimports.$1 = $1;\nexport function $1")
  // Add exports to 'imports'
  .replace(/\nmodule\.exports\.(.*?)\s+/g, "\nimports.$1")
  // Export default
  .replace(/$/, 'export default imports')
  // inline wasm bytes
  .replace(
    /\nconst path.*\nconst bytes.*\n/,
    `
var __toBinary = /* @__PURE__ */ (() => {
  var table = new Uint8Array(128);
  for (var i = 0; i < 64; i++)
    table[i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i * 4 - 205] = i;
  return (base64) => {
    var n = base64.length, bytes = new Uint8Array((n - (base64[n - 1] == "=") - (base64[n - 2] == "=")) * 3 / 4 | 0);
    for (var i2 = 0, j = 0; i2 < n; ) {
      var c0 = table[base64.charCodeAt(i2++)], c1 = table[base64.charCodeAt(i2++)];
      var c2 = table[base64.charCodeAt(i2++)], c3 = table[base64.charCodeAt(i2++)];
      bytes[j++] = c0 << 2 | c1 >> 4;
      bytes[j++] = c1 << 4 | c2 >> 2;
      bytes[j++] = c2 << 6 | c3;
    }
    return bytes;
  };
})();

const bytes = __toBinary(${JSON.stringify(await readFile(path.join(__dirname, `../../pkg/nodejs/${name}_bg.wasm`), "base64"))
});
`,
  );

await writeFile(path.join(__dirname, `../../pkg/browser.js`), patched + "\nglobalThis['pubky'] = imports");

// Move outside of nodejs

await Promise.all([".js", ".d.ts", "_bg.wasm"].map(suffix =>
  rename(
    path.join(__dirname, `../../pkg/nodejs/${name}${suffix}`),
    path.join(__dirname, `../../pkg/${suffix === '.js' ? "index.cjs" : (name + suffix)}`),
  ))
)
@@ -1,56 +0,0 @@
//! Main Crate Error

use pkarr::dns::SimpleDnsError;

// Alias Result to be the crate Result.
pub type Result<T, E = Error> = core::result::Result<T, E>;

#[derive(thiserror::Error, Debug)]
/// Pk common Error
pub enum Error {
    /// For starter, to remove as code matures.
    #[error("Generic error: {0}")]
    Generic(String),

    #[error("Could not resolve endpoint for {0}")]
    ResolveEndpoint(String),

    #[error("Could not convert the passed type into a Url")]
    InvalidUrl,

    // === Transparent ===
    #[error(transparent)]
    Dns(#[from] SimpleDnsError),

    #[error(transparent)]
    Pkarr(#[from] pkarr::Error),

    #[error(transparent)]
    Url(#[from] url::ParseError),

    #[error(transparent)]
    Reqwest(#[from] reqwest::Error),

    #[error(transparent)]
    Session(#[from] pubky_common::session::Error),

    #[error(transparent)]
    Crypto(#[from] pubky_common::crypto::Error),

    #[error(transparent)]
    RecoveryFile(#[from] pubky_common::recovery_file::Error),

    #[error(transparent)]
    AuthToken(#[from] pubky_common::auth::Error),
}

#[cfg(target_arch = "wasm32")]
use wasm_bindgen::JsValue;

#[cfg(target_arch = "wasm32")]
impl From<Error> for JsValue {
    fn from(error: Error) -> JsValue {
        let error_message = error.to_string();
        js_sys::Error::new(&error_message).into()
    }
}
@@ -1,36 +0,0 @@
mod error;
mod shared;

#[cfg(not(target_arch = "wasm32"))]
mod native;

#[cfg(target_arch = "wasm32")]
mod wasm;
#[cfg(target_arch = "wasm32")]
use std::{
    collections::HashSet,
    sync::{Arc, RwLock},
};

use wasm_bindgen::prelude::*;

#[cfg(not(target_arch = "wasm32"))]
use ::pkarr::PkarrClientAsync;

pub use error::Error;

#[cfg(not(target_arch = "wasm32"))]
pub use crate::shared::list_builder::ListBuilder;

#[derive(Debug, Clone)]
#[wasm_bindgen]
pub struct PubkyClient {
    http: reqwest::Client,
    #[cfg(not(target_arch = "wasm32"))]
    pub(crate) pkarr: PkarrClientAsync,
    /// A cookie jar for nodejs fetch.
    #[cfg(target_arch = "wasm32")]
    pub(crate) session_cookies: Arc<RwLock<HashSet<String>>>,
    #[cfg(target_arch = "wasm32")]
    pub(crate) pkarr_relays: Vec<String>,
}
@@ -1,258 +0,0 @@
use std::net::ToSocketAddrs;
use std::time::Duration;

use bytes::Bytes;
use pubky_common::{
    capabilities::Capabilities,
    recovery_file::{create_recovery_file, decrypt_recovery_file},
    session::Session,
};
use reqwest::{RequestBuilder, Response};
use tokio::sync::oneshot;
use url::Url;

use pkarr::{Keypair, PkarrClientAsync};

use ::pkarr::{mainline::dht::Testnet, PkarrClient, PublicKey, SignedPacket};

use crate::{
    error::{Error, Result},
    shared::list_builder::ListBuilder,
    PubkyClient,
};

static DEFAULT_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),);

#[derive(Debug, Default)]
pub struct PubkyClientBuilder {
    pkarr_settings: pkarr::Settings,
}

impl PubkyClientBuilder {
    /// Set Pkarr client [pkarr::Settings].
    pub fn pkarr_settings(mut self, settings: pkarr::Settings) -> Self {
        self.pkarr_settings = settings;
        self
    }

    /// Use the bootstrap nodes of a testnet, as the bootstrap nodes and
    /// resolvers in the internal Pkarr client.
    pub fn testnet(mut self, testnet: &Testnet) -> Self {
        self.pkarr_settings.dht.bootstrap = testnet.bootstrap.to_vec().into();

        self.pkarr_settings.resolvers = testnet
            .bootstrap
            .iter()
            .flat_map(|resolver| resolver.to_socket_addrs())
            .flatten()
            .collect::<Vec<_>>()
            .into();

        self
    }

    /// Set the request_timeout of the UDP socket in the Mainline DHT client in
    /// the internal Pkarr client.
    ///
    /// Useful to speed up unit tests.
    /// Defaults to 2 seconds.
    pub fn dht_request_timeout(mut self, timeout: Duration) -> Self {
        self.pkarr_settings.dht.request_timeout = timeout.into();
        self
    }

    /// Build [PubkyClient]
    pub fn build(self) -> PubkyClient {
        PubkyClient {
            http: reqwest::Client::builder()
                .cookie_store(true)
                .user_agent(DEFAULT_USER_AGENT)
                .build()
                .unwrap(),
            pkarr: PkarrClient::new(self.pkarr_settings).unwrap().as_async(),
        }
    }
}

impl Default for PubkyClient {
    fn default() -> Self {
        PubkyClient::builder().build()
    }
}

// === Public API ===

impl PubkyClient {
    /// Returns a builder to edit settings before creating [PubkyClient].
    pub fn builder() -> PubkyClientBuilder {
        PubkyClientBuilder::default()
    }

    /// Create a client connected to the local network
    /// with the bootstrapping node: `localhost:6881`
    pub fn testnet() -> Self {
        Self::test(&Testnet {
            bootstrap: vec!["localhost:6881".to_string()],
            nodes: vec![],
        })
    }

    /// Creates a [PubkyClient] with:
    /// - DHT bootstrap nodes set to the `testnet` bootstrap nodes.
    /// - DHT request timeout set to 500 milliseconds. (unless in CI, then it is left as default 2000)
    ///
    /// For more control, you can use [PubkyClientBuilder::testnet]
    pub fn test(testnet: &Testnet) -> PubkyClient {
        let mut builder = PubkyClient::builder().testnet(testnet);

        if std::env::var("CI").is_err() {
            builder = builder.dht_request_timeout(Duration::from_millis(500));
        }

        builder.build()
    }

    // === Getters ===

    /// Returns a reference to the internal [pkarr] Client.
    pub fn pkarr(&self) -> &PkarrClientAsync {
        &self.pkarr
    }

    // === Auth ===

    /// Signup to a homeserver and update Pkarr accordingly.
    ///
    /// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
    /// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
    pub async fn signup(&self, keypair: &Keypair, homeserver: &PublicKey) -> Result<Session> {
        self.inner_signup(keypair, homeserver).await
    }

    /// Check the current session for a given Pubky in its homeserver.
    ///
    /// Returns [Session] or `None` (if received `404 NOT_FOUND`),
    /// or [reqwest::Error] if the response has any other `>=400` status code.
    pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>> {
        self.inner_session(pubky).await
    }

    /// Signout from a homeserver.
    pub async fn signout(&self, pubky: &PublicKey) -> Result<()> {
        self.inner_signout(pubky).await
    }

    /// Signin to a homeserver.
    pub async fn signin(&self, keypair: &Keypair) -> Result<Session> {
        self.inner_signin(keypair).await
    }

    // === Public data ===

    /// Upload a small payload to a given path.
    pub async fn put<T: TryInto<Url>>(&self, url: T, content: &[u8]) -> Result<()> {
        self.inner_put(url, content).await
    }

    /// Download a small payload from a given path relative to a pubky author.
    pub async fn get<T: TryInto<Url>>(&self, url: T) -> Result<Option<Bytes>> {
        self.inner_get(url).await
    }

    /// Delete a file at a path relative to a pubky author.
    pub async fn delete<T: TryInto<Url>>(&self, url: T) -> Result<()> {
        self.inner_delete(url).await
    }

    /// Returns a [ListBuilder] to help pass options before calling [ListBuilder::send].
    ///
    /// `url` sets the path you want to list within.
    pub fn list<T: TryInto<Url>>(&self, url: T) -> Result<ListBuilder> {
        self.inner_list(url)
    }

    // === Helpers ===

    /// Create a recovery file of the `keypair`, containing the secret key encrypted
    /// using the `passphrase`.
    pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Vec<u8>> {
        Ok(create_recovery_file(keypair, passphrase)?)
    }

    /// Recover a keypair from a recovery file by decrypting the secret key using `passphrase`.
    pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair> {
        Ok(decrypt_recovery_file(recovery_file, passphrase)?)
    }

    /// Return a `pubkyauth://` url and wait for the incoming [AuthToken],
    /// verifying that AuthToken and, if capabilities were requested, signing in to
    /// the Pubky's homeserver and returning the [Session] information.
    pub fn auth_request(
        &self,
        relay: impl TryInto<Url>,
        capabilities: &Capabilities,
    ) -> Result<(Url, tokio::sync::oneshot::Receiver<PublicKey>)> {
        let mut relay: Url = relay
            .try_into()
            .map_err(|_| Error::Generic("Invalid relay Url".into()))?;

        let (pubkyauth_url, client_secret) = self.create_auth_request(&mut relay, capabilities)?;

        let (tx, rx) = oneshot::channel::<PublicKey>();

        let this = self.clone();

        tokio::spawn(async move {
            let to_send = this
                .subscribe_to_auth_response(relay, &client_secret)
                .await?;

            tx.send(to_send)
                .map_err(|_| Error::Generic("Failed to send the session after signing in with token, since the receiver is dropped".into()))?;

            Ok::<(), Error>(())
        });

        Ok((pubkyauth_url, rx))
    }

    /// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
    /// source of the pubkyauth request url.
    pub async fn send_auth_token<T: TryInto<Url>>(
        &self,
        keypair: &Keypair,
        pubkyauth_url: T,
    ) -> Result<()> {
        let url: Url = pubkyauth_url.try_into().map_err(|_| Error::InvalidUrl)?;

        self.inner_send_auth_token(keypair, url).await?;

        Ok(())
    }
}

// === Internals ===

impl PubkyClient {
    // === Pkarr ===

    pub(crate) async fn pkarr_resolve(
        &self,
        public_key: &PublicKey,
    ) -> Result<Option<SignedPacket>> {
        Ok(self.pkarr.resolve(public_key).await?)
    }

    pub(crate) async fn pkarr_publish(&self, signed_packet: &SignedPacket) -> Result<()> {
        Ok(self.pkarr.publish(signed_packet).await?)
    }

    // === HTTP ===

    pub(crate) fn request(&self, method: reqwest::Method, url: Url) -> RequestBuilder {
        self.http.request(method, url)
    }

    pub(crate) fn store_session(&self, _: &Response) {}
    pub(crate) fn remove_session(&self, _: &PublicKey) {}
}
@@ -1,341 +0,0 @@
use std::collections::HashMap;

use base64::{alphabet::URL_SAFE, engine::general_purpose::NO_PAD, Engine};
use reqwest::{Method, StatusCode};
use url::Url;

use pkarr::{Keypair, PublicKey};
use pubky_common::{
    auth::AuthToken,
    capabilities::{Capabilities, Capability},
    crypto::{decrypt, encrypt, hash, random_bytes},
    session::Session,
};

use crate::{
    error::{Error, Result},
    PubkyClient,
};

use super::pkarr::Endpoint;

impl PubkyClient {
    /// Signup to a homeserver and update Pkarr accordingly.
    ///
    /// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
    /// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
    pub(crate) async fn inner_signup(
        &self,
        keypair: &Keypair,
        homeserver: &PublicKey,
    ) -> Result<Session> {
        let homeserver = homeserver.to_string();

        let Endpoint { mut url, .. } = self.resolve_endpoint(&homeserver).await?;

        url.set_path("/signup");

        let body = AuthToken::sign(keypair, vec![Capability::root()]).serialize();

        let response = self
            .request(Method::POST, url.clone())
            .body(body)
            .send()
            .await?;

        self.store_session(&response);

        self.publish_pubky_homeserver(keypair, &homeserver).await?;

        let bytes = response.bytes().await?;

        Ok(Session::deserialize(&bytes)?)
    }

    /// Check the current session for a given Pubky in its homeserver.
    ///
    /// Returns None if not signed in, or [reqwest::Error]
    /// if the response has any other `>=400` status code.
    pub(crate) async fn inner_session(&self, pubky: &PublicKey) -> Result<Option<Session>> {
        let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(pubky).await?;

        url.set_path(&format!("/{}/session", pubky));

        let res = self.request(Method::GET, url).send().await?;

        if res.status() == StatusCode::NOT_FOUND {
            return Ok(None);
        }

        if !res.status().is_success() {
            res.error_for_status_ref()?;
        };

        let bytes = res.bytes().await?;

        Ok(Some(Session::deserialize(&bytes)?))
    }

    /// Signout from a homeserver.
    pub(crate) async fn inner_signout(&self, pubky: &PublicKey) -> Result<()> {
        let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(pubky).await?;

        url.set_path(&format!("/{}/session", pubky));

        self.request(Method::DELETE, url).send().await?;

        self.remove_session(pubky);

        Ok(())
    }

    /// Signin to a homeserver.
    pub(crate) async fn inner_signin(&self, keypair: &Keypair) -> Result<Session> {
        let token = AuthToken::sign(keypair, vec![Capability::root()]);

        self.signin_with_authtoken(&token).await
    }

    pub(crate) async fn inner_send_auth_token(
        &self,
        keypair: &Keypair,
        pubkyauth_url: Url,
    ) -> Result<()> {
        let query_params: HashMap<String, String> =
            pubkyauth_url.query_pairs().into_owned().collect();

        let relay = query_params
            .get("relay")
            .map(|r| url::Url::parse(r).expect("Relay query param to be valid URL"))
            .expect("Missing relay query param");

        let client_secret = query_params
            .get("secret")
            .map(|s| {
                let engine = base64::engine::GeneralPurpose::new(&URL_SAFE, NO_PAD);
                let bytes = engine.decode(s).expect("invalid client_secret");
                let arr: [u8; 32] = bytes.try_into().expect("invalid client_secret");

                arr
            })
            .expect("Missing client secret");

        let capabilities = query_params
            .get("caps")
            .map(|caps_string| {
                caps_string
                    .split(',')
                    .filter_map(|cap| Capability::try_from(cap).ok())
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();

        let token = AuthToken::sign(keypair, capabilities);

        let encrypted_token = encrypt(&token.serialize(), &client_secret)?;

        let engine = base64::engine::GeneralPurpose::new(&URL_SAFE, NO_PAD);

        let mut callback = relay.clone();
        let mut path_segments = callback.path_segments_mut().unwrap();
        path_segments.pop_if_empty();
        let channel_id = engine.encode(hash(&client_secret).as_bytes());
        path_segments.push(&channel_id);
        drop(path_segments);

        self.request(Method::POST, callback)
            .body(encrypted_token)
            .send()
            .await?;

        Ok(())
    }

    pub async fn inner_third_party_signin(
        &self,
        encrypted_token: &[u8],
        client_secret: &[u8; 32],
    ) -> Result<PublicKey> {
        let decrypted = decrypt(encrypted_token, client_secret)?;
        let token = AuthToken::deserialize(&decrypted)?;

        self.signin_with_authtoken(&token).await?;

        Ok(token.pubky().to_owned())
    }

    pub async fn signin_with_authtoken(&self, token: &AuthToken) -> Result<Session> {
        let mut url = Url::parse(&format!("https://{}/session", token.pubky()))?;

        self.resolve_url(&mut url).await?;

        let response = self
            .request(Method::POST, url)
            .body(token.serialize())
            .send()
            .await?;

        self.store_session(&response);

        let bytes = response.bytes().await?;

        Ok(Session::deserialize(&bytes)?)
    }

    pub(crate) fn create_auth_request(
        &self,
        relay: &mut Url,
        capabilities: &Capabilities,
    ) -> Result<(Url, [u8; 32])> {
        let engine = base64::engine::GeneralPurpose::new(&URL_SAFE, NO_PAD);

        let client_secret: [u8; 32] = random_bytes::<32>();

        let pubkyauth_url = Url::parse(&format!(
            "pubkyauth:///?caps={capabilities}&secret={}&relay={relay}",
            engine.encode(client_secret)
        ))?;

        let mut segments = relay
            .path_segments_mut()
            .map_err(|_| Error::Generic("Invalid relay".into()))?;
        // remove trailing slash if any.
        segments.pop_if_empty();
        let channel_id = &engine.encode(hash(&client_secret).as_bytes());
        segments.push(channel_id);
        drop(segments);

        Ok((pubkyauth_url, client_secret))
    }

    pub(crate) async fn subscribe_to_auth_response(
        &self,
        relay: Url,
        client_secret: &[u8; 32],
    ) -> Result<PublicKey> {
        let response = self.http.request(Method::GET, relay).send().await?;
        let encrypted_token = response.bytes().await?;
        let token_bytes = decrypt(&encrypted_token, client_secret)?;
        let token = AuthToken::verify(&token_bytes)?;

        if !token.capabilities().is_empty() {
            self.signin_with_authtoken(&token).await?;
        }

        Ok(token.pubky().clone())
    }
}

#[cfg(test)]
mod tests {

    use crate::*;

    use pkarr::{mainline::Testnet, Keypair};
    use pubky_common::capabilities::{Capabilities, Capability};
    use pubky_homeserver::Homeserver;
    use reqwest::StatusCode;

    #[tokio::test]
    async fn basic_authn() {
        let testnet = Testnet::new(10);
        let server = Homeserver::start_test(&testnet).await.unwrap();

        let client = PubkyClient::test(&testnet);

        let keypair = Keypair::random();

        client.signup(&keypair, &server.public_key()).await.unwrap();

        let session = client
            .session(&keypair.public_key())
            .await
            .unwrap()
            .unwrap();

        assert!(session.capabilities().contains(&Capability::root()));

        client.signout(&keypair.public_key()).await.unwrap();

        {
            let session = client.session(&keypair.public_key()).await.unwrap();

            assert!(session.is_none());
        }

        client.signin(&keypair).await.unwrap();

        {
            let session = client
                .session(&keypair.public_key())
                .await
                .unwrap()
                .unwrap();

            assert_eq!(session.pubky(), &keypair.public_key());
            assert!(session.capabilities().contains(&Capability::root()));
        }
    }

    #[tokio::test]
    async fn authz() {
        let testnet = Testnet::new(10);
        let server = Homeserver::start_test(&testnet).await.unwrap();

        let keypair = Keypair::random();
        let pubky = keypair.public_key();

        // Third party app side
        let capabilities: Capabilities =
            "/pub/pubky.app/:rw,/pub/foo.bar/file:r".try_into().unwrap();
        let client = PubkyClient::test(&testnet);
        let (pubkyauth_url, pubkyauth_response) = client
            .auth_request("https://demo.httprelay.io/link", &capabilities)
            .unwrap();

        // Authenticator side
        {
            let client = PubkyClient::test(&testnet);

            client.signup(&keypair, &server.public_key()).await.unwrap();

            client
                .send_auth_token(&keypair, pubkyauth_url)
                .await
                .unwrap();
        }

        let public_key = pubkyauth_response.await.unwrap();

        assert_eq!(&public_key, &pubky);

        // Test access control enforcement

        client
            .put(format!("pubky://{pubky}/pub/pubky.app/foo").as_str(), &[])
            .await
            .unwrap();

        assert_eq!(
            client
                .put(format!("pubky://{pubky}/pub/pubky.app").as_str(), &[])
                .await
                .map_err(|e| match e {
                    crate::Error::Reqwest(e) => e.status(),
                    _ => None,
                }),
            Err(Some(StatusCode::FORBIDDEN))
        );

        assert_eq!(
            client
                .put(format!("pubky://{pubky}/pub/foo.bar/file").as_str(), &[])
                .await
                .map_err(|e| match e {
                    crate::Error::Reqwest(e) => e.status(),
                    _ => None,
                }),
            Err(Some(StatusCode::FORBIDDEN))
        );
    }
}
@@ -1,105 +0,0 @@
use reqwest::Method;
use url::Url;

use crate::{error::Result, PubkyClient};

#[derive(Debug)]
pub struct ListBuilder<'a> {
    url: Url,
    reverse: bool,
    limit: Option<u16>,
    cursor: Option<&'a str>,
    client: &'a PubkyClient,
    shallow: bool,
}

impl<'a> ListBuilder<'a> {
    /// Create a new List request builder
    pub(crate) fn new(client: &'a PubkyClient, url: Url) -> Self {
        Self {
            client,
            url,
            limit: None,
            cursor: None,
            reverse: false,
            shallow: false,
        }
    }

    /// Set the `reverse` option.
    pub fn reverse(mut self, reverse: bool) -> Self {
        self.reverse = reverse;
        self
    }

    /// Set the `limit` value.
    pub fn limit(mut self, limit: u16) -> Self {
        self.limit = limit.into();
        self
    }

    /// Set the `cursor` value.
    ///
    /// Either a full `pubky://` Url (from previous list response),
    /// or a path (to a file or directory) relative to the `url`
    pub fn cursor(mut self, cursor: &'a str) -> Self {
        self.cursor = cursor.into();
        self
    }

    pub fn shallow(mut self, shallow: bool) -> Self {
        self.shallow = shallow;
        self
    }

    /// Send the list request.
    ///
    /// Returns a list of Pubky URLs of the files in the path of the `url`
    /// respecting [ListBuilder::reverse], [ListBuilder::limit] and [ListBuilder::cursor]
    /// options.
    pub async fn send(self) -> Result<Vec<String>> {
        let mut url = self.client.pubky_to_http(self.url).await?;

        if !url.path().ends_with('/') {
            let path = url.path().to_string();
            let mut parts = path.split('/').collect::<Vec<&str>>();
            parts.pop();

            let path = format!("{}/", parts.join("/"));

            url.set_path(&path)
        }

        let mut query = url.query_pairs_mut();

        if self.reverse {
            query.append_key_only("reverse");
        }

        if self.shallow {
            query.append_key_only("shallow");
        }

        if let Some(limit) = self.limit {
            query.append_pair("limit", &limit.to_string());
        }

        if let Some(cursor) = self.cursor {
            query.append_pair("cursor", cursor);
        }

        drop(query);

        let response = self.client.request(Method::GET, url).send().await?;

        response.error_for_status_ref()?;

        // TODO: bail on too large files.
        let bytes = response.bytes().await?;

        Ok(String::from_utf8_lossy(&bytes)
            .lines()
            .map(String::from)
            .collect())
    }
}
@@ -1,4 +0,0 @@
pub mod auth;
pub mod list_builder;
pub mod pkarr;
pub mod public;
@@ -1,336 +0,0 @@
use url::Url;

use pkarr::{
    dns::{rdata::SVCB, Packet},
    Keypair, PublicKey, SignedPacket,
};

use crate::{
    error::{Error, Result},
    PubkyClient,
};

const MAX_ENDPOINT_RESOLUTION_RECURSION: u8 = 3;

impl PubkyClient {
    /// Publish the SVCB record for `_pubky.<public_key>`.
    pub(crate) async fn publish_pubky_homeserver(
        &self,
        keypair: &Keypair,
        host: &str,
    ) -> Result<()> {
        let existing = self.pkarr_resolve(&keypair.public_key()).await?;

        let mut packet = Packet::new_reply(0);

        if let Some(existing) = existing {
            for answer in existing.packet().answers.iter().cloned() {
                if !answer.name.to_string().starts_with("_pubky") {
                    packet.answers.push(answer.into_owned())
                }
            }
        }

        let svcb = SVCB::new(0, host.try_into()?);

        packet.answers.push(pkarr::dns::ResourceRecord::new(
            "_pubky".try_into().unwrap(),
            pkarr::dns::CLASS::IN,
            60 * 60,
            pkarr::dns::rdata::RData::SVCB(svcb),
        ));

        let signed_packet = SignedPacket::from_packet(keypair, &packet)?;

        self.pkarr_publish(&signed_packet).await?;

        Ok(())
    }

    /// Resolve the homeserver for a pubky.
    pub(crate) async fn resolve_pubky_homeserver(&self, pubky: &PublicKey) -> Result<Endpoint> {
        let target = format!("_pubky.{pubky}");

        self.resolve_endpoint(&target)
            .await
            .map_err(|_| Error::Generic("Could not resolve homeserver".to_string()))
    }

    /// Resolve a service's public_key and "non-pkarr url" from a Pubky domain
    ///
    /// A "non-pkarr" url is any URL where the hostname isn't a 52-character z-base32 public key,
    /// usually an IPv4, IPv6 or ICANN domain, but could also be any other unknown hostname.
    ///
    /// Recursively resolve SVCB and HTTPS endpoints, with [MAX_ENDPOINT_RESOLUTION_RECURSION] limit.
    pub(crate) async fn resolve_endpoint(&self, target: &str) -> Result<Endpoint> {
        let original_target = target;
        // TODO: cache the result of this function?

        let mut target = target.to_string();

        let mut endpoint_public_key = None;
        let mut origin = target.clone();

        let mut step = 0;

        // PublicKey is very good at extracting the Pkarr TLD from a string.
        while let Ok(public_key) = PublicKey::try_from(target.clone()) {
            if step >= MAX_ENDPOINT_RESOLUTION_RECURSION {
                break;
            };
            step += 1;

            if let Some(signed_packet) = self
                .pkarr_resolve(&public_key)
                .await
                .map_err(|_| Error::ResolveEndpoint(original_target.into()))?
            {
                // Choose most prior SVCB record
                let svcb = signed_packet.resource_records(&target).fold(
                    None,
                    |prev: Option<SVCB>, answer| {
                        if let Some(svcb) = match &answer.rdata {
                            pkarr::dns::rdata::RData::SVCB(svcb) => Some(svcb),
                            pkarr::dns::rdata::RData::HTTPS(curr) => Some(&curr.0),
                            _ => None,
                        } {
                            let curr = svcb.clone();

                            if curr.priority == 0 {
                                return Some(curr);
                            }
                            if let Some(prev) = &prev {
                                // TODO return random if priority is the same
                                if curr.priority >= prev.priority {
                                    return Some(curr);
                                }
                            } else {
                                return Some(curr);
                            }
                        }

                        prev
                    },
                );

                if let Some(svcb) = svcb {
                    endpoint_public_key = Some(public_key.clone());
                    target = svcb.target.to_string();

                    if let Some(port) = svcb.get_param(pkarr::dns::rdata::SVCB::PORT) {
                        if port.len() < 2 {
                            // TODO: debug! Error encoding port!
                        }
                        let port = u16::from_be_bytes([port[0], port[1]]);

                        origin = format!("{target}:{port}");
                    } else {
                        origin.clone_from(&target);
                    };

                    if step >= MAX_ENDPOINT_RESOLUTION_RECURSION {
                        continue;
                    };
                }
            } else {
                break;
            }
        }

        if PublicKey::try_from(origin.as_str()).is_ok() {
            return Err(Error::ResolveEndpoint(original_target.into()));
        }

        if endpoint_public_key.is_some() {
            let url = Url::parse(&format!(
                "{}://{}",
                if origin.starts_with("localhost") {
                    "http"
                } else {
                    "https"
                },
                origin
            ))?;

            return Ok(Endpoint { url });
        }

        Err(Error::ResolveEndpoint(original_target.into()))
    }

    pub(crate) async fn resolve_url(&self, url: &mut Url) -> Result<()> {
        if let Some(Ok(pubky)) = url.host_str().map(PublicKey::try_from) {
            let Endpoint { url: x, .. } = self.resolve_endpoint(&format!("_pubky.{pubky}")).await?;

            url.set_host(x.host_str())?;
            url.set_port(x.port()).expect("should work!");
            url.set_scheme(x.scheme()).expect("should work!");
        };

        Ok(())
    }
}

#[derive(Debug)]
pub(crate) struct Endpoint {
    pub url: Url,
}

#[cfg(test)]
mod tests {
    use super::*;

    use pkarr::{
        dns::{
            rdata::{HTTPS, SVCB},
            Packet,
        },
        mainline::{dht::DhtSettings, Testnet},
        Keypair, PkarrClient, Settings, SignedPacket,
    };
    use pubky_homeserver::Homeserver;

    #[tokio::test]
    async fn resolve_endpoint_https() {
        let testnet = Testnet::new(10);

        let pkarr_client = PkarrClient::new(Settings {
            dht: DhtSettings {
                bootstrap: Some(testnet.bootstrap.clone()),
                ..Default::default()
            },
            ..Default::default()
        })
        .unwrap()
        .as_async();

        let domain = "example.com";
        let mut target;

        // Server
        {
            let keypair = Keypair::random();

            let https = HTTPS(SVCB::new(0, domain.try_into().unwrap()));

            let mut packet = Packet::new_reply(0);

            packet.answers.push(pkarr::dns::ResourceRecord::new(
                "foo".try_into().unwrap(),
                pkarr::dns::CLASS::IN,
                60 * 60,
                pkarr::dns::rdata::RData::HTTPS(https),
            ));

            let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();

            pkarr_client.publish(&signed_packet).await.unwrap();

            target = format!("foo.{}", keypair.public_key());
        }

        // intermediate
        {
            let keypair = Keypair::random();

            let svcb = SVCB::new(0, target.as_str().try_into().unwrap());

            let mut packet = Packet::new_reply(0);

            packet.answers.push(pkarr::dns::ResourceRecord::new(
                "bar".try_into().unwrap(),
                pkarr::dns::CLASS::IN,
                60 * 60,
                pkarr::dns::rdata::RData::SVCB(svcb),
            ));

            let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();

            pkarr_client.publish(&signed_packet).await.unwrap();

            target = format!("bar.{}", keypair.public_key())
        }

        {
            let keypair = Keypair::random();

            let svcb = SVCB::new(0, target.as_str().try_into().unwrap());

            let mut packet = Packet::new_reply(0);

            packet.answers.push(pkarr::dns::ResourceRecord::new(
                "pubky".try_into().unwrap(),
                pkarr::dns::CLASS::IN,
                60 * 60,
                pkarr::dns::rdata::RData::SVCB(svcb),
            ));

            let signed_packet = SignedPacket::from_packet(&keypair, &packet).unwrap();

            pkarr_client.publish(&signed_packet).await.unwrap();

            target = format!("pubky.{}", keypair.public_key())
        }

        let client = PubkyClient::test(&testnet);

        let endpoint = client.resolve_endpoint(&target).await.unwrap();

        assert_eq!(endpoint.url.host_str().unwrap(), domain);
    }

    #[tokio::test]
    async fn resolve_homeserver() {
        let testnet = Testnet::new(10);
        let server = Homeserver::start_test(&testnet).await.unwrap();

        // Publish an intermediate controller of the homeserver
        let pkarr_client = PkarrClient::new(Settings {
            dht: DhtSettings {
                bootstrap: Some(testnet.bootstrap.clone()),
                ..Default::default()
            },
            ..Default::default()
        })
        .unwrap()
        .as_async();

        let intermediate = Keypair::random();

        let mut packet = Packet::new_reply(0);

        let server_tld = server.public_key().to_string();

        let svcb = SVCB::new(0, server_tld.as_str().try_into().unwrap());

        packet.answers.push(pkarr::dns::ResourceRecord::new(
            "pubky".try_into().unwrap(),
            pkarr::dns::CLASS::IN,
            60 * 60,
            pkarr::dns::rdata::RData::SVCB(svcb),
        ));

        let signed_packet = SignedPacket::from_packet(&intermediate, &packet).unwrap();

        pkarr_client.publish(&signed_packet).await.unwrap();

        {
            let client = PubkyClient::test(&testnet);

            let pubky = Keypair::random();

            client
                .publish_pubky_homeserver(&pubky, &format!("pubky.{}", &intermediate.public_key()))
                .await
                .unwrap();

            let Endpoint { url, .. } = client
                .resolve_pubky_homeserver(&pubky.public_key())
                .await
                .unwrap();

            assert_eq!(url.host_str(), Some("localhost"));
            assert_eq!(url.port(), Some(server.port()));
        }
    }
}
@@ -1,824 +0,0 @@
|
|||||||
use bytes::Bytes;

use pkarr::PublicKey;
use reqwest::{Method, StatusCode};
use url::Url;

use crate::{
    error::{Error, Result},
    PubkyClient,
};

use super::{list_builder::ListBuilder, pkarr::Endpoint};

impl PubkyClient {
    pub(crate) async fn inner_put<T: TryInto<Url>>(&self, url: T, content: &[u8]) -> Result<()> {
        let url = self.pubky_to_http(url).await?;

        let response = self
            .request(Method::PUT, url)
            .body(content.to_owned())
            .send()
            .await?;

        response.error_for_status()?;

        Ok(())
    }

    pub(crate) async fn inner_get<T: TryInto<Url>>(&self, url: T) -> Result<Option<Bytes>> {
        let url = self.pubky_to_http(url).await?;

        let response = self.request(Method::GET, url).send().await?;

        if response.status() == StatusCode::NOT_FOUND {
            return Ok(None);
        }

        response.error_for_status_ref()?;

        // TODO: bail on too large files.
        let bytes = response.bytes().await?;

        Ok(Some(bytes))
    }

    pub(crate) async fn inner_delete<T: TryInto<Url>>(&self, url: T) -> Result<()> {
        let url = self.pubky_to_http(url).await?;

        let response = self.request(Method::DELETE, url).send().await?;

        response.error_for_status_ref()?;

        Ok(())
    }

    pub(crate) fn inner_list<T: TryInto<Url>>(&self, url: T) -> Result<ListBuilder> {
        Ok(ListBuilder::new(
            self,
            url.try_into().map_err(|_| Error::InvalidUrl)?,
        ))
    }

    pub(crate) async fn pubky_to_http<T: TryInto<Url>>(&self, url: T) -> Result<Url> {
        let original_url: Url = url.try_into().map_err(|_| Error::InvalidUrl)?;

        let pubky = original_url
            .host_str()
            .ok_or(Error::Generic("Missing Pubky Url host".to_string()))?;

        if let Ok(public_key) = PublicKey::try_from(pubky) {
            let Endpoint { mut url, .. } = self.resolve_pubky_homeserver(&public_key).await?;

            // TODO: remove if we move to subdomains instead of paths.
            if original_url.scheme() == "pubky" {
                let path = original_url.path_segments();

                let mut split = url.path_segments_mut().unwrap();
                split.push(pubky);
                if let Some(segments) = path {
                    for segment in segments {
                        split.push(segment);
                    }
                }
                drop(split);
            }

            return Ok(url);
        }

        Ok(original_url)
    }
}
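
For reference, a minimal sketch (not part of the removed crate) of the path rewrite that `pubky_to_http` performs above: the `pubky://` host is prepended as a path segment on the already-resolved homeserver URL. The host `examplepubky` and the `http://localhost:8080` endpoint are placeholder assumptions.

```rust
use url::Url;

// Illustrative only: mimics the rewrite done by pubky_to_http, assuming the
// homeserver endpoint has already been resolved to an HTTP url.
fn pubky_url_to_http(original: &Url, mut endpoint: Url) -> Url {
    let pubky = original.host_str().expect("pubky url has a host");

    {
        let mut segments = endpoint.path_segments_mut().expect("http url is a base");
        // The pubky public key becomes the first path segment on the homeserver.
        segments.push(pubky);
        if let Some(path) = original.path_segments() {
            for segment in path {
                segments.push(segment);
            }
        }
    }

    endpoint
}

fn main() {
    let original = Url::parse("pubky://examplepubky/pub/foo.txt").unwrap();
    let endpoint = Url::parse("http://localhost:8080").unwrap();

    assert_eq!(
        pubky_url_to_http(&original, endpoint).as_str(),
        "http://localhost:8080/examplepubky/pub/foo.txt"
    );
}
```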

#[cfg(test)]
mod tests {

    use core::panic;

    use crate::*;

    use pkarr::{mainline::Testnet, Keypair};
    use pubky_homeserver::Homeserver;
    use reqwest::{Method, StatusCode};

    #[tokio::test]
    async fn put_get_delete() {
        let testnet = Testnet::new(10);
        let server = Homeserver::start_test(&testnet).await.unwrap();

        let client = PubkyClient::test(&testnet);

        let keypair = Keypair::random();

        client.signup(&keypair, &server.public_key()).await.unwrap();

        let url = format!("pubky://{}/pub/foo.txt", keypair.public_key());
        let url = url.as_str();

        client.put(url, &[0, 1, 2, 3, 4]).await.unwrap();

        let response = client.get(url).await.unwrap().unwrap();

        assert_eq!(response, bytes::Bytes::from(vec![0, 1, 2, 3, 4]));

        client.delete(url).await.unwrap();

        let response = client.get(url).await.unwrap();

        assert_eq!(response, None);
    }

#[tokio::test]
|
|
||||||
async fn unauthorized_put_delete() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let server = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&keypair, &server.public_key()).await.unwrap();
|
|
||||||
|
|
||||||
let public_key = keypair.public_key();
|
|
||||||
|
|
||||||
let url = format!("pubky://{public_key}/pub/foo.txt");
|
|
||||||
let url = url.as_str();
|
|
||||||
|
|
||||||
let other_client = PubkyClient::test(&testnet);
|
|
||||||
{
|
|
||||||
let other = Keypair::random();
|
|
||||||
|
|
||||||
// TODO: remove extra client after switching to subdomains.
|
|
||||||
other_client
|
|
||||||
.signup(&other, &server.public_key())
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let response = other_client.put(url, &[0, 1, 2, 3, 4]).await;
|
|
||||||
|
|
||||||
match response {
|
|
||||||
Err(Error::Reqwest(error)) => {
|
|
||||||
assert!(error.status() == Some(StatusCode::UNAUTHORIZED))
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
panic!("expected error StatusCode::UNAUTHORIZED")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
client.put(url, &[0, 1, 2, 3, 4]).await.unwrap();
|
|
||||||
|
|
||||||
{
|
|
||||||
let other = Keypair::random();
|
|
||||||
|
|
||||||
// TODO: remove extra client after switching to subdomains.
|
|
||||||
other_client
|
|
||||||
.signup(&other, &server.public_key())
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let response = other_client.delete(url).await;
|
|
||||||
|
|
||||||
match response {
|
|
||||||
Err(Error::Reqwest(error)) => {
|
|
||||||
assert!(error.status() == Some(StatusCode::UNAUTHORIZED))
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
panic!("expected error StatusCode::UNAUTHORIZED")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let response = client.get(url).await.unwrap().unwrap();
|
|
||||||
|
|
||||||
assert_eq!(response, bytes::Bytes::from(vec![0, 1, 2, 3, 4]));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn list() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let server = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&keypair, &server.public_key()).await.unwrap();
|
|
||||||
|
|
||||||
let pubky = keypair.public_key();
|
|
||||||
|
|
||||||
let urls = vec![
|
|
||||||
format!("pubky://{pubky}/pub/a.wrong/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.wrong/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/z.wrong/a.txt"),
|
|
||||||
];
|
|
||||||
|
|
||||||
for url in urls {
|
|
||||||
client.put(url.as_str(), &[0]).await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = format!("pubky://{pubky}/pub/example.com/extra");
|
|
||||||
let url = url.as_str();
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client.list(url).unwrap().send().await.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
],
|
|
||||||
"normal list with no limit or cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client.list(url).unwrap().limit(2).send().await.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
],
|
|
||||||
"normal list with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.limit(2)
|
|
||||||
.cursor("a.txt")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
],
|
|
||||||
"normal list with limit and a file cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.limit(2)
|
|
||||||
.cursor("cc-nested/")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
],
|
|
||||||
"normal list with limit and a directory cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.limit(2)
|
|
||||||
.cursor(&format!("pubky://{pubky}/pub/example.com/a.txt"))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
],
|
|
||||||
"normal list with limit and a full url cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.limit(2)
|
|
||||||
.cursor("/a.txt")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
],
|
|
||||||
"normal list with limit and a leading / cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.reverse(true)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
],
|
|
||||||
"reverse list with no limit or cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.reverse(true)
|
|
||||||
.limit(2)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
],
|
|
||||||
"reverse list with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.reverse(true)
|
|
||||||
.limit(2)
|
|
||||||
.cursor("d.txt")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/cc-nested/z.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
],
|
|
||||||
"reverse list with limit and cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn list_shallow() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let server = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&keypair, &server.public_key()).await.unwrap();
|
|
||||||
|
|
||||||
let pubky = keypair.public_key();
|
|
||||||
|
|
||||||
let urls = vec![
|
|
||||||
format!("pubky://{pubky}/pub/a.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
format!("pubky://{pubky}/pub/file2"),
|
|
||||||
format!("pubky://{pubky}/pub/z.com/a.txt"),
|
|
||||||
];
|
|
||||||
|
|
||||||
for url in urls {
|
|
||||||
client.put(url.as_str(), &[0]).await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = format!("pubky://{pubky}/pub/");
|
|
||||||
let url = url.as_str();
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/a.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/"),
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
format!("pubky://{pubky}/pub/file2"),
|
|
||||||
format!("pubky://{pubky}/pub/z.com/"),
|
|
||||||
],
|
|
||||||
"normal list shallow"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.limit(2)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/a.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/"),
|
|
||||||
],
|
|
||||||
"normal list shallow with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.limit(2)
|
|
||||||
.cursor("example.com/a.txt")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
],
|
|
||||||
"normal list shallow with limit and a file cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.limit(3)
|
|
||||||
.cursor("example.com/")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/"),
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
],
|
|
||||||
"normal list shallow with limit and a directory cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.reverse(true)
|
|
||||||
.shallow(true)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/z.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/file2"),
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/a.com/"),
|
|
||||||
],
|
|
||||||
"reverse list shallow"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.reverse(true)
|
|
||||||
.shallow(true)
|
|
||||||
.limit(2)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/z.com/"),
|
|
||||||
format!("pubky://{pubky}/pub/file2"),
|
|
||||||
],
|
|
||||||
"reverse list shallow with limit but no cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.reverse(true)
|
|
||||||
.limit(2)
|
|
||||||
.cursor("file2")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/"),
|
|
||||||
],
|
|
||||||
"reverse list shallow with limit and a file cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let list = client
|
|
||||||
.list(url)
|
|
||||||
.unwrap()
|
|
||||||
.shallow(true)
|
|
||||||
.reverse(true)
|
|
||||||
.limit(2)
|
|
||||||
.cursor("example.con/")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
list,
|
|
||||||
vec![
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/"),
|
|
||||||
],
|
|
||||||
"reverse list shallow with limit and a directory cursor"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn list_events() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let server = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&keypair, &server.public_key()).await.unwrap();
|
|
||||||
|
|
||||||
let pubky = keypair.public_key();
|
|
||||||
|
|
||||||
let urls = vec![
|
|
||||||
format!("pubky://{pubky}/pub/a.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con/d.txt"),
|
|
||||||
format!("pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("pubky://{pubky}/pub/file"),
|
|
||||||
format!("pubky://{pubky}/pub/file2"),
|
|
||||||
format!("pubky://{pubky}/pub/z.com/a.txt"),
|
|
||||||
];
|
|
||||||
|
|
||||||
for url in urls {
|
|
||||||
client.put(url.as_str(), &[0]).await.unwrap();
|
|
||||||
client.delete(url.as_str()).await.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
let feed_url = format!("http://localhost:{}/events/", server.port());
|
|
||||||
let feed_url = feed_url.as_str();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let cursor;
|
|
||||||
|
|
||||||
{
|
|
||||||
let response = client
|
|
||||||
.request(
|
|
||||||
Method::GET,
|
|
||||||
format!("{feed_url}?limit=10").as_str().try_into().unwrap(),
|
|
||||||
)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let text = response.text().await.unwrap();
|
|
||||||
let lines = text.split('\n').collect::<Vec<_>>();
|
|
||||||
|
|
||||||
cursor = lines.last().unwrap().split(" ").last().unwrap().to_string();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
lines,
|
|
||||||
vec![
|
|
||||||
format!("PUT pubky://{pubky}/pub/a.com/a.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/a.com/a.txt"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.com/a.txt"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.com/b.txt"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.com/c.txt"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.com/d.txt"),
|
|
||||||
format!("cursor: {cursor}",)
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let response = client
|
|
||||||
.request(
|
|
||||||
Method::GET,
|
|
||||||
format!("{feed_url}?limit=10&cursor={cursor}")
|
|
||||||
.as_str()
|
|
||||||
.try_into()
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let text = response.text().await.unwrap();
|
|
||||||
let lines = text.split('\n').collect::<Vec<_>>();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
lines,
|
|
||||||
vec![
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.con/d.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.con/d.txt"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/example.con"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/file"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/file"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/file2"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/file2"),
|
|
||||||
format!("PUT pubky://{pubky}/pub/z.com/a.txt"),
|
|
||||||
format!("DEL pubky://{pubky}/pub/z.com/a.txt"),
|
|
||||||
lines.last().unwrap().to_string()
|
|
||||||
]
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn read_after_event() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let server = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let keypair = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&keypair, &server.public_key()).await.unwrap();
|
|
||||||
|
|
||||||
let pubky = keypair.public_key();
|
|
||||||
|
|
||||||
let url = format!("pubky://{pubky}/pub/a.com/a.txt");
|
|
||||||
|
|
||||||
client.put(url.as_str(), &[0]).await.unwrap();
|
|
||||||
|
|
||||||
let feed_url = format!("http://localhost:{}/events/", server.port());
|
|
||||||
let feed_url = feed_url.as_str();
|
|
||||||
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
{
|
|
||||||
let response = client
|
|
||||||
.request(
|
|
||||||
Method::GET,
|
|
||||||
format!("{feed_url}?limit=10").as_str().try_into().unwrap(),
|
|
||||||
)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let text = response.text().await.unwrap();
|
|
||||||
let lines = text.split('\n').collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let cursor = lines.last().unwrap().split(" ").last().unwrap().to_string();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
lines,
|
|
||||||
vec![
|
|
||||||
format!("PUT pubky://{pubky}/pub/a.com/a.txt"),
|
|
||||||
format!("cursor: {cursor}",)
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let get = client.get(url.as_str()).await.unwrap();
|
|
||||||
dbg!(get);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn dont_delete_shared_blobs() {
|
|
||||||
let testnet = Testnet::new(10);
|
|
||||||
let homeserver = Homeserver::start_test(&testnet).await.unwrap();
|
|
||||||
let client = PubkyClient::test(&testnet);
|
|
||||||
|
|
||||||
let homeserver_pubky = homeserver.public_key();
|
|
||||||
|
|
||||||
let user_1 = Keypair::random();
|
|
||||||
let user_2 = Keypair::random();
|
|
||||||
|
|
||||||
client.signup(&user_1, &homeserver_pubky).await.unwrap();
|
|
||||||
client.signup(&user_2, &homeserver_pubky).await.unwrap();
|
|
||||||
|
|
||||||
let user_1_id = user_1.public_key();
|
|
||||||
let user_2_id = user_2.public_key();
|
|
||||||
|
|
||||||
let url_1 = format!("pubky://{user_1_id}/pub/pubky.app/file/file_1");
|
|
||||||
let url_2 = format!("pubky://{user_2_id}/pub/pubky.app/file/file_1");
|
|
||||||
|
|
||||||
let file = vec![1];
|
|
||||||
client.put(url_1.as_str(), &file).await.unwrap();
|
|
||||||
client.put(url_2.as_str(), &file).await.unwrap();
|
|
||||||
|
|
||||||
// Delete file 1
|
|
||||||
client.delete(url_1.as_str()).await.unwrap();
|
|
||||||
|
|
||||||
let blob = client.get(url_2.as_str()).await.unwrap().unwrap();
|
|
||||||
|
|
||||||
assert_eq!(blob, file);
|
|
||||||
|
|
||||||
let feed_url = format!("http://localhost:{}/events/", homeserver.port());
|
|
||||||
|
|
||||||
let response = client
|
|
||||||
.request(
|
|
||||||
Method::GET,
|
|
||||||
format!("{feed_url}").as_str().try_into().unwrap(),
|
|
||||||
)
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let text = response.text().await.unwrap();
|
|
||||||
let lines = text.split('\n').collect::<Vec<_>>();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
lines,
|
|
||||||
vec![
|
|
||||||
format!("PUT pubky://{user_1_id}/pub/pubky.app/file/file_1",),
|
|
||||||
format!("PUT pubky://{user_2_id}/pub/pubky.app/file/file_1",),
|
|
||||||
format!("DEL pubky://{user_1_id}/pub/pubky.app/file/file_1",),
|
|
||||||
lines.last().unwrap().to_string()
|
|
||||||
]
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
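
The event-feed tests above depend on a plain-text response of `PUT <url>` / `DEL <url>` lines followed by a final `cursor: <value>` line. A small, self-contained sketch of parsing that format; the sample body and cursor value below are placeholders.

```rust
// Sketch only: parses the plain-text event feed format exercised in the tests
// above ("PUT <url>" / "DEL <url>" lines, then a final "cursor: <value>").
fn parse_event_feed(body: &str) -> (Vec<(String, String)>, Option<String>) {
    let mut events = Vec::new();
    let mut cursor = None;

    for line in body.lines() {
        if let Some(value) = line.strip_prefix("cursor: ") {
            cursor = Some(value.to_string());
        } else if let Some((op, url)) = line.split_once(' ') {
            events.push((op.to_string(), url.to_string()));
        }
    }

    (events, cursor)
}

fn main() {
    // Placeholder feed body with an illustrative cursor value.
    let body = "PUT pubky://example/pub/a.txt\nDEL pubky://example/pub/a.txt\ncursor: 0000000000000";

    let (events, cursor) = parse_event_feed(body);
    assert_eq!(events.len(), 2);
    assert_eq!(events[0].0, "PUT");
    assert_eq!(cursor.as_deref(), Some("0000000000000"));
}
```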
|
|
||||||
@@ -1,250 +0,0 @@
|
|||||||
use std::{
|
|
||||||
collections::HashSet,
|
|
||||||
sync::{Arc, RwLock},
|
|
||||||
};
|
|
||||||
|
|
||||||
use js_sys::{Array, Uint8Array};
|
|
||||||
use wasm_bindgen::prelude::*;
|
|
||||||
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
use pubky_common::capabilities::Capabilities;
|
|
||||||
|
|
||||||
use crate::error::Error;
|
|
||||||
use crate::PubkyClient;
|
|
||||||
|
|
||||||
mod http;
|
|
||||||
mod keys;
|
|
||||||
mod pkarr;
|
|
||||||
mod recovery_file;
|
|
||||||
mod session;
|
|
||||||
|
|
||||||
use keys::{Keypair, PublicKey};
|
|
||||||
use session::Session;
|
|
||||||
|
|
||||||
impl Default for PubkyClient {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static DEFAULT_RELAYS: [&str; 1] = ["https://relay.pkarr.org"];
|
|
||||||
static TESTNET_RELAYS: [&str; 1] = ["http://localhost:15411/pkarr"];
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
impl PubkyClient {
|
|
||||||
#[wasm_bindgen(constructor)]
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
http: reqwest::Client::builder().build().unwrap(),
|
|
||||||
session_cookies: Arc::new(RwLock::new(HashSet::new())),
|
|
||||||
pkarr_relays: DEFAULT_RELAYS.into_iter().map(|s| s.to_string()).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a client with configurations appropriate for local testing:
|
|
||||||
/// - set Pkarr relays to `["http://localhost:15411/pkarr"]` instead of default relay.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub fn testnet() -> Self {
|
|
||||||
Self {
|
|
||||||
http: reqwest::Client::builder().build().unwrap(),
|
|
||||||
session_cookies: Arc::new(RwLock::new(HashSet::new())),
|
|
||||||
pkarr_relays: TESTNET_RELAYS.into_iter().map(|s| s.to_string()).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Set Pkarr relays used for publishing and resolving Pkarr packets.
|
|
||||||
///
|
|
||||||
/// By default, [PubkyClient] will use `["https://relay.pkarr.org"]`
|
|
||||||
#[wasm_bindgen(js_name = "setPkarrRelays")]
|
|
||||||
pub fn set_pkarr_relays(mut self, relays: Vec<String>) -> Self {
|
|
||||||
self.pkarr_relays = relays;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read the set of pkarr relays used by this client.
|
|
||||||
#[wasm_bindgen(js_name = "getPkarrRelays")]
|
|
||||||
pub fn get_pkarr_relays(&self) -> Vec<String> {
|
|
||||||
self.pkarr_relays.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Signup to a homeserver and update Pkarr accordingly.
|
|
||||||
///
|
|
||||||
/// The homeserver is a Pkarr domain name, where the TLD is a Pkarr public key
|
|
||||||
/// for example "pubky.o4dksfbqk85ogzdb5osziw6befigbuxmuxkuxq8434q89uj56uyy"
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn signup(
|
|
||||||
&self,
|
|
||||||
keypair: &Keypair,
|
|
||||||
homeserver: &PublicKey,
|
|
||||||
) -> Result<Session, JsValue> {
|
|
||||||
Ok(Session(
|
|
||||||
self.inner_signup(keypair.as_inner(), homeserver.as_inner())
|
|
||||||
.await
|
|
||||||
.map_err(JsValue::from)?,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check the current session for a given Pubky in its homeserver.
|
|
||||||
///
|
|
||||||
/// Returns [Session] or `None` (if received `404 NOT_FOUND`),
|
|
||||||
/// or throws the received error if the response has any other `>=400` status code.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn session(&self, pubky: &PublicKey) -> Result<Option<Session>, JsValue> {
|
|
||||||
self.inner_session(pubky.as_inner())
|
|
||||||
.await
|
|
||||||
.map(|s| s.map(Session))
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Signout from a homeserver.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn signout(&self, pubky: &PublicKey) -> Result<(), JsValue> {
|
|
||||||
self.inner_signout(pubky.as_inner())
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Signin to a homeserver using the root Keypair.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn signin(&self, keypair: &Keypair) -> Result<(), JsValue> {
|
|
||||||
self.inner_signin(keypair.as_inner())
|
|
||||||
.await
|
|
||||||
.map(|_| ())
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a `pubkyauth://` url and wait for the incoming [AuthToken],
/// verifying that AuthToken, and if capabilities were requested, signing in to
/// the Pubky's homeserver and returning the [Session] information.
|
|
||||||
///
|
|
||||||
/// Returns a tuple of [pubkyAuthUrl, Promise<Session>]
|
|
||||||
#[wasm_bindgen(js_name = "authRequest")]
|
|
||||||
pub fn auth_request(&self, relay: &str, capabilities: &str) -> Result<js_sys::Array, JsValue> {
|
|
||||||
let mut relay: Url = relay
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| Error::Generic("Invalid relay Url".into()))?;
|
|
||||||
|
|
||||||
let (pubkyauth_url, client_secret) = self.create_auth_request(
|
|
||||||
&mut relay,
|
|
||||||
&Capabilities::try_from(capabilities).map_err(|_| "Invalid capabilities")?,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let this = self.clone();
|
|
||||||
|
|
||||||
let future = async move {
|
|
||||||
this.subscribe_to_auth_response(relay, &client_secret)
|
|
||||||
.await
|
|
||||||
.map(|pubky| JsValue::from(PublicKey(pubky)))
|
|
||||||
.map_err(|err| JsValue::from_str(&format!("{:?}", err)))
|
|
||||||
};
|
|
||||||
|
|
||||||
let promise = wasm_bindgen_futures::future_to_promise(future);
|
|
||||||
|
|
||||||
// Return the URL and the promise
|
|
||||||
let js_tuple = js_sys::Array::new();
|
|
||||||
js_tuple.push(&JsValue::from_str(pubkyauth_url.as_ref()));
|
|
||||||
js_tuple.push(&promise);
|
|
||||||
|
|
||||||
Ok(js_tuple)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sign an [pubky_common::auth::AuthToken], encrypt it and send it to the
|
|
||||||
/// source of the pubkyauth request url.
|
|
||||||
#[wasm_bindgen(js_name = "sendAuthToken")]
|
|
||||||
pub async fn send_auth_token(
|
|
||||||
&self,
|
|
||||||
keypair: &Keypair,
|
|
||||||
pubkyauth_url: &str,
|
|
||||||
) -> Result<(), JsValue> {
|
|
||||||
let pubkyauth_url: Url = pubkyauth_url
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| Error::Generic("Invalid relay Url".into()))?;
|
|
||||||
|
|
||||||
self.inner_send_auth_token(keypair.as_inner(), pubkyauth_url)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Public data ===
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
/// Upload a small payload to a given path.
|
|
||||||
pub async fn put(&self, url: &str, content: &[u8]) -> Result<(), JsValue> {
|
|
||||||
self.inner_put(url, content).await.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Download a small payload from a given path relative to a pubky author.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn get(&self, url: &str) -> Result<Option<Uint8Array>, JsValue> {
|
|
||||||
self.inner_get(url)
|
|
||||||
.await
|
|
||||||
.map(|b| b.map(|b| (&*b).into()))
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Delete a file at a path relative to a pubky author.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn delete(&self, url: &str) -> Result<(), JsValue> {
|
|
||||||
self.inner_delete(url).await.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a list of Pubky urls (as strings).
|
|
||||||
///
|
|
||||||
/// - `url`: The Pubky url (string) to the directory you want to list its content.
|
|
||||||
/// - `cursor`: Either a full `pubky://` Url (from previous list response),
|
|
||||||
/// or a path (to a file or directory) relative to the `url`
|
|
||||||
/// - `reverse`: List in reverse order
|
|
||||||
/// - `limit` Limit the number of urls in the response
|
|
||||||
/// - `shallow`: List directories and files, instead of flat list of files.
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub async fn list(
|
|
||||||
&self,
|
|
||||||
url: &str,
|
|
||||||
cursor: Option<String>,
|
|
||||||
reverse: Option<bool>,
|
|
||||||
limit: Option<u16>,
|
|
||||||
shallow: Option<bool>,
|
|
||||||
) -> Result<Array, JsValue> {
|
|
||||||
// TODO: try later to return Vec<String> from async function.
|
|
||||||
|
|
||||||
if let Some(cursor) = cursor {
|
|
||||||
return self
|
|
||||||
.inner_list(url)?
|
|
||||||
.reverse(reverse.unwrap_or(false))
|
|
||||||
.limit(limit.unwrap_or(u16::MAX))
|
|
||||||
.cursor(&cursor)
|
|
||||||
.shallow(shallow.unwrap_or(false))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.map(|urls| {
|
|
||||||
let js_array = Array::new();
|
|
||||||
|
|
||||||
for url in urls {
|
|
||||||
js_array.push(&JsValue::from_str(&url));
|
|
||||||
}
|
|
||||||
|
|
||||||
js_array
|
|
||||||
})
|
|
||||||
.map_err(|e| e.into());
|
|
||||||
}
|
|
||||||
|
|
||||||
self.inner_list(url)?
|
|
||||||
.reverse(reverse.unwrap_or(false))
|
|
||||||
.limit(limit.unwrap_or(u16::MAX))
|
|
||||||
.shallow(shallow.unwrap_or(false))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.map(|urls| {
|
|
||||||
let js_array = Array::new();
|
|
||||||
|
|
||||||
for url in urls {
|
|
||||||
js_array.push(&JsValue::from_str(&url));
|
|
||||||
}
|
|
||||||
|
|
||||||
js_array
|
|
||||||
})
|
|
||||||
.map_err(|e| e.into())
|
|
||||||
}
|
|
||||||
}
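
The options documented above (`cursor`, `reverse`, `limit`, `shallow`) map onto the native `ListBuilder` exercised in the tests earlier in this diff. A hedged sketch, assuming the removed `pubky` crate and an already signed-in `client`:

```rust
use pubky::PubkyClient;

// Sketch only: mirrors the builder chain used in the native tests above.
// `client` and `url` are assumed to come from an existing signed-up session,
// with `url` pointing at a `pubky://<pubky>/pub/...` directory.
async fn first_page(client: &PubkyClient, url: &str) -> Vec<String> {
    client
        .list(url)
        .unwrap()
        .reverse(false)
        .limit(10)
        .shallow(false)
        .send()
        .await
        .unwrap()
}
```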
|
|
||||||
@@ -1,40 +0,0 @@
use crate::PubkyClient;

use reqwest::{Method, RequestBuilder, Response};
use url::Url;

impl PubkyClient {
    pub(crate) fn request(&self, method: Method, url: Url) -> RequestBuilder {
        let mut request = self.http.request(method, url).fetch_credentials_include();

        for cookie in self.session_cookies.read().unwrap().iter() {
            request = request.header("Cookie", cookie);
        }

        request
    }

    // Support cookies for nodejs

    pub(crate) fn store_session(&self, response: &Response) {
        if let Some(cookie) = response
            .headers()
            .get("set-cookie")
            .and_then(|h| h.to_str().ok())
            .and_then(|s| s.split(';').next())
        {
            self.session_cookies
                .write()
                .unwrap()
                .insert(cookie.to_string());
        }
    }

    pub(crate) fn remove_session(&self, pubky: &pkarr::PublicKey) {
        let key = pubky.to_string();

        self.session_cookies
            .write()
            .unwrap()
            .retain(|cookie| !cookie.starts_with(&key));
    }
}
@@ -1,99 +0,0 @@
|
|||||||
use wasm_bindgen::prelude::*;
|
|
||||||
|
|
||||||
use crate::Error;
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub struct Keypair(pkarr::Keypair);
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
impl Keypair {
|
|
||||||
#[wasm_bindgen]
|
|
||||||
/// Generate a random [Keypair]
|
|
||||||
pub fn random() -> Self {
|
|
||||||
Self(pkarr::Keypair::random())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Generate a [Keypair] from a secret key.
|
|
||||||
#[wasm_bindgen(js_name = "fromSecretKey")]
|
|
||||||
pub fn from_secret_key(secret_key: js_sys::Uint8Array) -> Result<Keypair, JsValue> {
|
|
||||||
if !js_sys::Uint8Array::instanceof(&secret_key) {
|
|
||||||
return Err("Expected secret_key to be an instance of Uint8Array".into());
|
|
||||||
}
|
|
||||||
|
|
||||||
let len = secret_key.byte_length();
|
|
||||||
if len != 32 {
|
|
||||||
return Err(format!("Expected secret_key to be 32 bytes, got {len}"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut bytes = [0; 32];
|
|
||||||
secret_key.copy_to(&mut bytes);
|
|
||||||
|
|
||||||
Ok(Self(pkarr::Keypair::from_secret_key(&bytes)))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the secret key of this keypair.
|
|
||||||
#[wasm_bindgen(js_name = "secretKey")]
|
|
||||||
pub fn secret_key(&self) -> js_sys::Uint8Array {
|
|
||||||
self.0.secret_key().as_slice().into()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the [PublicKey] of this keypair.
|
|
||||||
#[wasm_bindgen(js_name = "publicKey")]
|
|
||||||
pub fn public_key(&self) -> PublicKey {
|
|
||||||
PublicKey(self.0.public_key())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Keypair {
|
|
||||||
pub fn as_inner(&self) -> &pkarr::Keypair {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<pkarr::Keypair> for Keypair {
|
|
||||||
fn from(keypair: pkarr::Keypair) -> Self {
|
|
||||||
Self(keypair)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
pub struct PublicKey(pub(crate) pkarr::PublicKey);
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
impl PublicKey {
|
|
||||||
#[wasm_bindgen]
|
|
||||||
/// Convert the PublicKey to Uint8Array
|
|
||||||
pub fn to_uint8array(&self) -> js_sys::Uint8Array {
|
|
||||||
js_sys::Uint8Array::from(self.0.as_bytes().as_slice())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[wasm_bindgen]
|
|
||||||
/// Returns the z-base32 encoding of this public key
|
|
||||||
pub fn z32(&self) -> String {
|
|
||||||
self.0.to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[wasm_bindgen(js_name = "from")]
|
|
||||||
/// @throws
|
|
||||||
pub fn try_from(value: JsValue) -> Result<PublicKey, JsValue> {
|
|
||||||
let string = value
|
|
||||||
.as_string()
|
|
||||||
.ok_or("Couldn't create a PublicKey from this type of value")?;
|
|
||||||
|
|
||||||
Ok(PublicKey(
|
|
||||||
pkarr::PublicKey::try_from(string).map_err(Error::Pkarr)?,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PublicKey {
|
|
||||||
pub fn as_inner(&self) -> &pkarr::PublicKey {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<pkarr::PublicKey> for PublicKey {
|
|
||||||
fn from(value: pkarr::PublicKey) -> Self {
|
|
||||||
PublicKey(value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
use reqwest::StatusCode;
|
|
||||||
|
|
||||||
pub use pkarr::{PublicKey, SignedPacket};
|
|
||||||
|
|
||||||
use crate::error::Result;
|
|
||||||
use crate::PubkyClient;
|
|
||||||
|
|
||||||
// TODO: Add an in memory cache of packets
|
|
||||||
|
|
||||||
impl PubkyClient {
|
|
||||||
//TODO: migrate to pkarr::PkarrRelayClient
|
|
||||||
pub(crate) async fn pkarr_resolve(
|
|
||||||
&self,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
) -> Result<Option<SignedPacket>> {
|
|
||||||
//TODO: Allow multiple relays in parallel
|
|
||||||
let relay = self.pkarr_relays.first().expect("initialized with relays");
|
|
||||||
|
|
||||||
let res = self
|
|
||||||
.http
|
|
||||||
.get(format!("{relay}/{}", public_key))
|
|
||||||
.send()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if res.status() == StatusCode::NOT_FOUND {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: guard against too large responses.
|
|
||||||
let bytes = res.bytes().await?;
|
|
||||||
|
|
||||||
let existing = SignedPacket::from_relay_payload(public_key, &bytes)?;
|
|
||||||
|
|
||||||
Ok(Some(existing))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) async fn pkarr_publish(&self, signed_packet: &SignedPacket) -> Result<()> {
|
|
||||||
let relay = self.pkarr_relays.first().expect("initialized with relays");
|
|
||||||
|
|
||||||
self.http
|
|
||||||
.put(format!("{relay}/{}", signed_packet.public_key()))
|
|
||||||
.body(signed_packet.to_relay_payload())
|
|
||||||
.send()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
use js_sys::Uint8Array;
use wasm_bindgen::prelude::{wasm_bindgen, JsValue};

use crate::error::Error;

use super::keys::Keypair;

/// Create a recovery file of the `keypair`, containing the secret key encrypted
/// using the `passphrase`.
#[wasm_bindgen(js_name = "createRecoveryFile")]
pub fn create_recovery_file(keypair: &Keypair, passphrase: &str) -> Result<Uint8Array, JsValue> {
    pubky_common::recovery_file::create_recovery_file(keypair.as_inner(), passphrase)
        .map(|b| b.as_slice().into())
        .map_err(|e| Error::from(e).into())
}

/// Decrypt a recovery file and recover the `keypair` it contains, using the
/// `passphrase` it was encrypted with.
#[wasm_bindgen(js_name = "decryptRecoveryFile")]
pub fn decrypt_recovery_file(recovery_file: &[u8], passphrase: &str) -> Result<Keypair, JsValue> {
    pubky_common::recovery_file::decrypt_recovery_file(recovery_file, passphrase)
        .map(Keypair::from)
        .map_err(|e| Error::from(e).into())
}
@@ -1,27 +0,0 @@
use pubky_common::session;

use wasm_bindgen::prelude::*;

use super::keys::PublicKey;

#[wasm_bindgen]
pub struct Session(pub(crate) session::Session);

#[wasm_bindgen]
impl Session {
    /// Return the [PublicKey] of this session
    #[wasm_bindgen]
    pub fn pubky(&self) -> PublicKey {
        self.0.pubky().clone().into()
    }

    /// Return the capabilities that this session has.
    #[wasm_bindgen]
    pub fn capabilities(&self) -> Vec<String> {
        self.0
            .capabilities()
            .iter()
            .map(|c| c.to_string())
            .collect()
    }
}
@@ -1,80 +0,0 @@
|
|||||||
use crate::keypair::get_keypair_from_secret_key;
|
|
||||||
use crate::{PubkyAuthDetails, Capability};
|
|
||||||
use crate::utils::create_response_vector;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use pubky::PubkyClient;
|
|
||||||
use serde_json;
|
|
||||||
use url::Url;
|
|
||||||
|
|
||||||
pub async fn authorize(url: String, secret_key: String) -> Vec<String> {
|
|
||||||
let client = PubkyClient::testnet();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
let parsed_url = match Url::parse(&url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
match client.send_auth_token(&keypair, parsed_url).await {
|
|
||||||
Ok(_) => create_response_vector(false, "send_auth_token success".to_string()),
|
|
||||||
Err(error) => create_response_vector(true, format!("send_auth_token failure: {}", error)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pubky_auth_details_to_json(details: &PubkyAuthDetails) -> Result<String, String> {
|
|
||||||
serde_json::to_string(details).map_err(|_| "Error serializing to JSON".to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_pubky_auth_url(url_str: &str) -> Result<PubkyAuthDetails, String> {
|
|
||||||
let url = Url::parse(url_str).map_err(|_| "Invalid URL".to_string())?;
|
|
||||||
|
|
||||||
if url.scheme() != "pubkyauth" {
|
|
||||||
return Err("Invalid scheme, expected 'pubkyauth'".to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Collect query pairs into a HashMap for efficient access
|
|
||||||
let query_params: HashMap<_, _> = url.query_pairs().into_owned().collect();
|
|
||||||
|
|
||||||
let relay = query_params
|
|
||||||
.get("relay")
|
|
||||||
.cloned()
|
|
||||||
.ok_or_else(|| "Missing relay".to_string())?;
|
|
||||||
|
|
||||||
let capabilities_str = query_params
|
|
||||||
.get("capabilities")
|
|
||||||
.or_else(|| query_params.get("caps"))
|
|
||||||
.cloned()
|
|
||||||
.ok_or_else(|| "Missing capabilities".to_string())?;
|
|
||||||
|
|
||||||
let secret = query_params
|
|
||||||
.get("secret")
|
|
||||||
.cloned()
|
|
||||||
.ok_or_else(|| "Missing secret".to_string())?;
|
|
||||||
|
|
||||||
// Parse capabilities
|
|
||||||
let capabilities = capabilities_str
|
|
||||||
.split(',')
|
|
||||||
.map(|capability| {
|
|
||||||
let mut parts = capability.splitn(2, ':');
|
|
||||||
let path = parts
|
|
||||||
.next()
|
|
||||||
.ok_or_else(|| format!("Invalid capability format in '{}'", capability))?;
|
|
||||||
let permission = parts
|
|
||||||
.next()
|
|
||||||
.ok_or_else(|| format!("Invalid capability format in '{}'", capability))?;
|
|
||||||
Ok(Capability {
|
|
||||||
path: path.to_string(),
|
|
||||||
permission: permission.to_string(),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.collect::<Result<Vec<_>, String>>()?;
|
|
||||||
|
|
||||||
Ok(PubkyAuthDetails {
|
|
||||||
relay,
|
|
||||||
capabilities,
|
|
||||||
secret,
|
|
||||||
})
|
|
||||||
}
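
For illustration, a sketch of the `pubkyauth` URL shape this parser expects, using only the helpers defined in this removed `auth.rs`; the relay, capability and secret values are placeholders.

```rust
// Sketch only: the query parameters are relay, caps (or capabilities) with
// "path:permission" entries, and secret. All values below are placeholders.
fn main() {
    let url =
        "pubkyauth:///?relay=https://relay.example.com/link&caps=/pub/pubky.app/:rw&secret=client-secret-placeholder";

    match parse_pubky_auth_url(url) {
        Ok(details) => println!("{}", pubky_auth_details_to_json(&details).unwrap()),
        Err(error) => eprintln!("invalid pubkyauth url: {error}"),
    }
}
```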
|
|
||||||
@@ -1,3 +0,0 @@
fn main() {
    uniffi::uniffi_bindgen_main()
}
@@ -1,34 +0,0 @@
use pkarr::Keypair;

/**
 * Get a keypair from a secret key
 */
pub fn get_keypair_from_secret_key(secret_key: &str) -> Result<Keypair, String> {
    let bytes = match hex::decode(secret_key) {
        Ok(bytes) => bytes,
        Err(_) => return Err("Failed to decode secret key".to_string()),
    };

    let secret_key_bytes: [u8; 32] = match bytes.try_into() {
        Ok(secret_key) => secret_key,
        Err(_) => {
            return Err("Failed to convert secret key to 32-byte array".to_string());
        }
    };

    Ok(Keypair::from_secret_key(&secret_key_bytes))
}

/**
 * Get the secret key from a keypair
 */
pub fn get_secret_key_from_keypair(keypair: &Keypair) -> String {
    hex::encode(keypair.secret_key())
}

/**
 * Generate a new keypair
 */
pub fn generate_keypair() -> Keypair {
    Keypair::random()
}
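
A small sketch (assuming only the helpers in this removed `keypair.rs`) showing the hex secret-key round-trip:

```rust
// Sketch only: generate a keypair, export its secret key as hex, and restore
// it again with the helpers defined above.
fn main() {
    let keypair = generate_keypair();
    let secret_key_hex = get_secret_key_from_keypair(&keypair);

    let restored = get_keypair_from_secret_key(&secret_key_hex).expect("valid hex secret key");
    assert_eq!(
        restored.public_key().to_string(),
        keypair.public_key().to_string()
    );
}
```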
629
rust/src/lib.rs
@@ -1,629 +0,0 @@
|
|||||||
mod types;
|
|
||||||
mod keypair;
|
|
||||||
mod auth;
|
|
||||||
mod utils;
|
|
||||||
|
|
||||||
pub use types::*;
|
|
||||||
pub use keypair::*;
|
|
||||||
pub use auth::*;
|
|
||||||
pub use utils::*;
|
|
||||||
|
|
||||||
uniffi::setup_scaffolding!();
|
|
||||||
|
|
||||||
use std::str;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use base64::Engine;
|
|
||||||
use base64::engine::general_purpose;
|
|
||||||
use pubky::PubkyClient;
|
|
||||||
use hex;
|
|
||||||
use hex::ToHex;
|
|
||||||
use serde::Serialize;
|
|
||||||
use url::Url;
|
|
||||||
use tokio;
|
|
||||||
use pkarr::{PkarrClient, SignedPacket, Keypair, dns, PublicKey};
|
|
||||||
use pkarr::dns::rdata::{RData, HTTPS, SVCB};
|
|
||||||
use pkarr::dns::{Packet, ResourceRecord};
|
|
||||||
use serde_json::json;
|
|
||||||
use utils::*;
|
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use std::sync::{Arc, Mutex};
|
|
||||||
use std::time::Duration;
|
|
||||||
use pkarr::bytes::Bytes;
|
|
||||||
use pubky_common::session::Session;
|
|
||||||
use tokio::runtime::Runtime;
|
|
||||||
use tokio::time;
|
|
||||||
|
|
||||||
static PUBKY_CLIENT: Lazy<Arc<PubkyClient>> = Lazy::new(|| {
|
|
||||||
Arc::new(PubkyClient::testnet())
|
|
||||||
});
|
|
||||||
|
|
||||||
static TOKIO_RUNTIME: Lazy<Arc<Runtime>> = Lazy::new(|| {
|
|
||||||
Arc::new(
|
|
||||||
Runtime::new().expect("Failed to create Tokio runtime")
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
// Define the EventListener trait
|
|
||||||
#[uniffi::export(callback_interface)]
|
|
||||||
pub trait EventListener: Send + Sync {
|
|
||||||
fn on_event_occurred(&self, event_data: String);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(uniffi::Object)]
|
|
||||||
pub struct EventNotifier {
|
|
||||||
listener: Arc<Mutex<Option<Box<dyn EventListener>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EventNotifier {
|
|
||||||
#[uniffi::constructor]
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
listener: Arc::new(Mutex::new(None)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_listener(&self, listener: Box<dyn EventListener>) {
|
|
||||||
let mut lock = self.listener.lock().unwrap();
|
|
||||||
*lock = Some(listener);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn remove_listener(&self) {
|
|
||||||
let mut lock = self.listener.lock().unwrap();
|
|
||||||
*lock = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn notify_event(&self, event_data: String) {
|
|
||||||
let lock = self.listener.lock().unwrap();
|
|
||||||
if let Some(listener) = &*lock {
|
|
||||||
listener.on_event_occurred(event_data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static EVENT_NOTIFIER: Lazy<Arc<EventNotifier>> = Lazy::new(|| {
|
|
||||||
Arc::new(EventNotifier::new())
|
|
||||||
});
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn set_event_listener(listener: Box<dyn EventListener>) {
|
|
||||||
EVENT_NOTIFIER.as_ref().set_listener(listener);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn remove_event_listener() {
|
|
||||||
EVENT_NOTIFIER.as_ref().remove_listener();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn start_internal_event_loop() {
|
|
||||||
let event_notifier = EVENT_NOTIFIER.clone();
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.spawn(async move {
|
|
||||||
let mut interval = time::interval(Duration::from_secs(2));
|
|
||||||
loop {
|
|
||||||
interval.tick().await;
|
|
||||||
event_notifier.as_ref().notify_event("Internal event triggered".to_string());
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
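
A minimal sketch of wiring up the callback interface above from Rust; in the app the `EventListener` implementation actually comes from the generated Kotlin/Swift bindings rather than a Rust struct.

```rust
// Sketch only: a Rust-side listener for the UniFFI callback interface above.
struct LoggingListener;

impl EventListener for LoggingListener {
    fn on_event_occurred(&self, event_data: String) {
        println!("pubky event: {event_data}");
    }
}

fn main() {
    // Register the listener, start the periodic internal event loop, and
    // later unregister when events are no longer needed.
    set_event_listener(Box::new(LoggingListener));
    start_internal_event_loop();

    remove_event_listener();
}
```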
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn delete_file(url: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let parsed_url = match Url::parse(&url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
match client.delete(parsed_url).await {
|
|
||||||
Ok(_) => create_response_vector(false, "Deleted successfully".to_string()),
|
|
||||||
Err(error) => create_response_vector(true, format!("Failed to delete: {}", error)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn session(pubky: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let public_key = match PublicKey::try_from(pubky) {
|
|
||||||
Ok(key) => key,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Invalid homeserver public key: {}", error)),
|
|
||||||
};
|
|
||||||
let result = match client.session(&public_key).await {
|
|
||||||
Ok(session) => session,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to get session: {}", error)),
|
|
||||||
};
|
|
||||||
let session: Session = match result {
|
|
||||||
Some(session) => session,
|
|
||||||
None => return create_response_vector(true, "No session returned".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
let json_obj = json!({
|
|
||||||
"pubky": session.pubky().to_string(),
|
|
||||||
"capabilities": session.capabilities().iter().map(|c| c.to_string()).collect::<Vec<String>>(),
|
|
||||||
});
|
|
||||||
|
|
||||||
let json_str = match serde_json::to_string(&json_obj) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to serialize JSON: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
create_response_vector(false, json_str)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn generate_secret_key() -> Vec<String> {
|
|
||||||
let keypair = generate_keypair();
|
|
||||||
let secret_key = get_secret_key_from_keypair(&keypair);
|
|
||||||
let public_key = keypair.public_key();
|
|
||||||
let uri = public_key.to_uri_string();
|
|
||||||
let json_obj = json!({
|
|
||||||
"secret_key": secret_key,
|
|
||||||
"public_key": public_key.to_string(),
|
|
||||||
"uri": uri,
|
|
||||||
});
|
|
||||||
|
|
||||||
let json_str = match serde_json::to_string(&json_obj) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to serialize JSON: {}", e)),
|
|
||||||
};
|
|
||||||
start_internal_event_loop();
|
|
||||||
create_response_vector(false, json_str)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn get_public_key_from_secret_key(secret_key: String) -> Vec<String> {
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
let public_key = keypair.public_key();
|
|
||||||
let uri = public_key.to_uri_string();
|
|
||||||
let json_obj = json!({
|
|
||||||
"public_key": public_key.to_string(),
|
|
||||||
"uri": uri,
|
|
||||||
});
|
|
||||||
|
|
||||||
let json_str = match serde_json::to_string(&json_obj) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to serialize JSON: {}", e)),
|
|
||||||
};
|
|
||||||
create_response_vector(false, json_str)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn publish_https(record_name: String, target: String, secret_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Create SVCB record with the target domain
|
|
||||||
let target = match target.as_str().try_into() {
|
|
||||||
Ok(target) => target,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Invalid target: {}", e)),
|
|
||||||
};
|
|
||||||
let svcb = SVCB::new(0, target);
|
|
||||||
|
|
||||||
// Create HTTPS record
|
|
||||||
let https_record = HTTPS(svcb);
|
|
||||||
|
|
||||||
// Create DNS packet
|
|
||||||
let mut packet = Packet::new_reply(0);
|
|
||||||
let dns_name = match dns::Name::new(&record_name) {
|
|
||||||
Ok(name) => name,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Invalid DNS name: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
packet.answers.push(ResourceRecord::new(
|
|
||||||
dns_name,
|
|
||||||
dns::CLASS::IN,
|
|
||||||
3600, // TTL in seconds
|
|
||||||
dns::rdata::RData::HTTPS(https_record),
|
|
||||||
));
|
|
||||||
|
|
||||||
let signed_packet = match SignedPacket::from_packet(&keypair, &packet) {
|
|
||||||
Ok(signed_packet) => signed_packet,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to create signed packet: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
match client.pkarr().publish(&signed_packet).await {
|
|
||||||
Ok(()) => create_response_vector(false, keypair.public_key().to_string()),
|
|
||||||
Err(e) => create_response_vector(true, format!("Failed to publish: {}", e)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn resolve_https(public_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let public_key = match public_key.as_str().try_into() {
|
|
||||||
Ok(key) => key,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Invalid public key: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
|
|
||||||
match client.pkarr().resolve(&public_key).await {
|
|
||||||
Ok(Some(signed_packet)) => {
|
|
||||||
// Extract HTTPS records from the signed packet
|
|
||||||
let https_records: Vec<serde_json::Value> = signed_packet.packet().answers.iter()
|
|
||||||
.filter_map(|record| {
|
|
||||||
if let dns::rdata::RData::HTTPS(https) = &record.rdata {
|
|
||||||
// Create a JSON object
|
|
||||||
let mut https_json = serde_json::json!({
|
|
||||||
"name": record.name.to_string(),
|
|
||||||
"class": format!("{:?}", record.class),
|
|
||||||
"ttl": record.ttl,
|
|
||||||
"priority": https.0.priority,
|
|
||||||
"target": https.0.target.to_string(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Access specific parameters using the constants from SVCB
|
|
||||||
if let Some(port_param) = https.0.get_param(SVCB::PORT) {
|
|
||||||
if port_param.len() == 2 {
|
|
||||||
let port = u16::from_be_bytes([port_param[0], port_param[1]]);
|
|
||||||
https_json["port"] = serde_json::json!(port);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Access ALPN parameter if needed
|
|
||||||
if let Some(alpn_param) = https.0.get_param(SVCB::ALPN) {
|
|
||||||
// Parse ALPN protocols (list of character strings)
|
|
||||||
let mut position = 0;
|
|
||||||
let mut alpn_protocols = Vec::new();
|
|
||||||
while position < alpn_param.len() {
|
|
||||||
let length = alpn_param[position] as usize;
|
|
||||||
position += 1;
|
|
||||||
if position + length <= alpn_param.len() {
|
|
||||||
let protocol = String::from_utf8_lossy(
|
|
||||||
&alpn_param[position..position + length],
|
|
||||||
);
|
|
||||||
alpn_protocols.push(protocol.to_string());
|
|
||||||
position += length;
|
|
||||||
} else {
|
|
||||||
break; // Malformed ALPN parameter
|
|
||||||
}
|
|
||||||
}
|
|
||||||
https_json["alpn"] = serde_json::json!(alpn_protocols);
|
|
||||||
}
|
|
||||||
// TODO: Add other parameters as needed.
|
|
||||||
Some(https_json)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
if https_records.is_empty() {
|
|
||||||
return create_response_vector(true, "No HTTPS records found".to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create JSON response
|
|
||||||
let json_obj = json!({
|
|
||||||
"public_key": public_key.to_string(),
|
|
||||||
"https_records": https_records,
|
|
||||||
"last_seen": signed_packet.last_seen(),
|
|
||||||
"timestamp": signed_packet.timestamp(),
|
|
||||||
});
|
|
||||||
|
|
||||||
let json_str = match serde_json::to_string(&json_obj) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to serialize JSON: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
create_response_vector(false, json_str)
|
|
||||||
},
|
|
||||||
Ok(None) => create_response_vector(true, "No signed packet found".to_string()),
|
|
||||||
Err(e) => create_response_vector(true, format!("Failed to resolve: {}", e)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn sign_up(secret_key: String, homeserver: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
let homeserver_public_key = match PublicKey::try_from(homeserver) {
|
|
||||||
Ok(key) => key,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Invalid homeserver public key: {}", error)),
|
|
||||||
};
|
|
||||||
|
|
||||||
match client.signup(&keypair, &homeserver_public_key).await {
|
|
||||||
Ok(session) => create_response_vector(false, session.pubky().to_string()),
|
|
||||||
Err(error) => create_response_vector(true, format!("signup failure: {}", error)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn sign_in(secret_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
match client.signin(&keypair).await {
|
|
||||||
Ok(_) => create_response_vector(false, "Sign in success".to_string()),
|
|
||||||
Err(error) => {
|
|
||||||
create_response_vector(true, format!("Failed to sign in: {}", error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn sign_out(secret_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
match client.signout(&keypair.public_key()).await {
|
|
||||||
Ok(_) => create_response_vector(false, "Sign out success".to_string()),
|
|
||||||
Err(error) => {
|
|
||||||
create_response_vector(true, format!("Failed to sign out: {}", error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn put(url: String, content: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
match client.put(parsed_url, &content.as_bytes()).await {
|
|
||||||
Ok(_) => create_response_vector(false, trimmed_url.to_string()),
|
|
||||||
Err(error) => {
|
|
||||||
create_response_vector(true, format!("Failed to put: {}", error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn get(url: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
let result: Option<Bytes> = match client.get(parsed_url).await {
|
|
||||||
Ok(res) => res,
|
|
||||||
Err(_) => return create_response_vector(true, "Request failed".to_string()),
|
|
||||||
};
|
|
||||||
let bytes = match result {
|
|
||||||
Some(bytes) => bytes,
|
|
||||||
None => return create_response_vector(true, "No data returned".to_string()),
|
|
||||||
};
|
|
||||||
let string = match str::from_utf8(&bytes) {
|
|
||||||
Ok(s) => s.to_string(),
|
|
||||||
Err(_) => return create_response_vector(true, "Invalid UTF-8 sequence".to_string()),
|
|
||||||
};
|
|
||||||
create_response_vector(false, string)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Resolve a signed packet from a public key
|
|
||||||
* @param public_key The public key to resolve
|
|
||||||
 * @returns A vector with two elements: the first element is the string "success" or "error",
 * and the second element is the response data (either an error message or the resolved signed packet serialized as JSON)
|
|
||||||
**/
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn resolve(public_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let public_key = match public_key.as_str().try_into() {
|
|
||||||
Ok(key) => key,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Invalid zbase32 encoded key: {}", e)),
|
|
||||||
};
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
|
|
||||||
match client.pkarr().resolve(&public_key).await {
|
|
||||||
Ok(Some(signed_packet)) => {
|
|
||||||
// Collect references to ResourceRecords from the signed packet's answers
|
|
||||||
let all_records: Vec<&ResourceRecord> = signed_packet.packet().answers.iter().collect();
|
|
||||||
// Convert each ResourceRecord to a JSON value, handling errors appropriately
|
|
||||||
let json_records: Vec<serde_json::Value> = all_records
|
|
||||||
.iter()
|
|
||||||
.filter_map(|record| {
|
|
||||||
match resource_record_to_json(record) {
|
|
||||||
Ok(json_value) => Some(json_value),
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("Error converting record to JSON: {}", e);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let bytes = signed_packet.as_bytes();
|
|
||||||
let public_key = &bytes[..32];
|
|
||||||
let signature = &bytes[32..96];
|
|
||||||
let timestamp = signed_packet.timestamp();
|
|
||||||
let dns_packet = &bytes[104..];
|
|
||||||
let hex: String = signed_packet.encode_hex();
|
|
||||||
|
|
||||||
let json_obj = json!({
|
|
||||||
"signed_packet": hex,
|
|
||||||
"public_key": general_purpose::STANDARD.encode(public_key),
|
|
||||||
"signature": general_purpose::STANDARD.encode(signature),
|
|
||||||
"timestamp": timestamp,
|
|
||||||
"last_seen": signed_packet.last_seen(),
|
|
||||||
"dns_packet": general_purpose::STANDARD.encode(dns_packet),
|
|
||||||
"records": json_records
|
|
||||||
});
|
|
||||||
|
|
||||||
let json_str = serde_json::to_string(&json_obj)
|
|
||||||
.expect("Failed to convert JSON object to string");
|
|
||||||
|
|
||||||
create_response_vector(false, json_str)
|
|
||||||
},
|
|
||||||
Ok(None) => {
|
|
||||||
create_response_vector(true, "No signed packet found".to_string())
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
create_response_vector(true, format!("Failed to resolve: {}", e))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
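As a rough illustration of the shape documented above, a caller could parse the second element of the `resolve` response as JSON; this sketch assumes `serde_json` is available on the caller's side and that the field names match the `json!` block above:

```rust
// Sketch: check the ["success" | "error", data] vector and, on success,
// parse the JSON payload whose fields mirror the json! object above.
fn example_resolve(public_key: String) {
    let result = resolve(public_key);
    if result[0] == "error" {
        eprintln!("resolve failed: {}", result[1]);
        return;
    }
    let value: serde_json::Value =
        serde_json::from_str(&result[1]).expect("payload should be valid JSON");
    println!("timestamp: {}", value["timestamp"]);
    println!(
        "record count: {}",
        value["records"].as_array().map(|r| r.len()).unwrap_or(0)
    );
}
```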
|
|
||||||
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn publish(record_name: String, record_content: String, secret_key: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut packet = dns::Packet::new_reply(0);
|
|
||||||
|
|
||||||
let dns_name = match dns::Name::new(&record_name) {
|
|
||||||
Ok(name) => name,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to create DNS name: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let record_content_str: &str = record_content.as_str();
|
|
||||||
|
|
||||||
let txt_record = match record_content_str.try_into() {
|
|
||||||
Ok(value) => RData::TXT(value),
|
|
||||||
Err(e) => {
|
|
||||||
return create_response_vector(true, format!("Failed to convert string to TXT record: {}", e))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
packet.answers.push(dns::ResourceRecord::new(
|
|
||||||
dns_name,
|
|
||||||
dns::CLASS::IN,
|
|
||||||
30,
|
|
||||||
txt_record,
|
|
||||||
));
|
|
||||||
|
|
||||||
match SignedPacket::from_packet(&keypair, &packet) {
|
|
||||||
Ok(signed_packet) => {
|
|
||||||
match client.pkarr().publish(&signed_packet).await {
|
|
||||||
Ok(()) => {
|
|
||||||
create_response_vector(false, keypair.public_key().to_string())
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
create_response_vector(true, format!("Failed to publish: {}", e))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
create_response_vector(true, format!("Failed to create signed packet: {}", e))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
#[uniffi::export]
|
|
||||||
pub fn list(url: String) -> Vec<String> {
|
|
||||||
let runtime = TOKIO_RUNTIME.clone();
|
|
||||||
runtime.block_on(async {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
let list_builder = match client.list(parsed_url) {
|
|
||||||
Ok(list) => list,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to list: {}", error)),
|
|
||||||
};
|
|
||||||
// Execute the non-Send part synchronously
|
|
||||||
let send_future = list_builder.send();
|
|
||||||
let send_res = match send_future.await {
|
|
||||||
Ok(res) => res,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to send list request: {}", error))
|
|
||||||
};
|
|
||||||
let json_string = match serde_json::to_string(&send_res) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to serialize JSON: {}", error)),
|
|
||||||
};
|
|
||||||
create_response_vector(false, json_string)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[uniffi::export]
pub fn auth(url: String, secret_key: String) -> Vec<String> {
    let runtime = TOKIO_RUNTIME.clone();
    runtime.block_on(authorize(url, secret_key))
}
|
|
||||||
|
|
||||||
#[uniffi::export]
pub fn parse_auth_url(url: String) -> Vec<String> {
    let parsed_details = match parse_pubky_auth_url(&url) {
        Ok(details) => details,
        Err(error) => return create_response_vector(true, error),
    };
    match pubky_auth_details_to_json(&parsed_details) {
        Ok(json) => create_response_vector(false, json),
        Err(error) => create_response_vector(true, error),
    }
}
|
|
||||||
|
|
||||||
#[uniffi::export]
pub fn create_recovery_file(secret_key: String, passphrase: String) -> Vec<String> {
    if secret_key.is_empty() || passphrase.is_empty() {
        return create_response_vector(true, "Secret key and passphrase must not be empty".to_string());
    }
    let keypair = match get_keypair_from_secret_key(&secret_key) {
        Ok(keypair) => keypair,
        Err(error) => return create_response_vector(true, error),
    };
    let recovery_file_bytes = match PubkyClient::create_recovery_file(&keypair, &passphrase) {
        Ok(bytes) => bytes,
        Err(_) => return create_response_vector(true, "Failed to create recovery file".to_string()),
    };
    let recovery_file = base64::encode(&recovery_file_bytes);
    create_response_vector(false, recovery_file)
}
|
|
||||||
|
|
||||||
#[uniffi::export]
pub fn decrypt_recovery_file(recovery_file: String, passphrase: String) -> Vec<String> {
    if recovery_file.is_empty() || passphrase.is_empty() {
        return create_response_vector(true, "Recovery file and passphrase must not be empty".to_string());
    }
    let recovery_file_bytes = match base64::decode(&recovery_file) {
        Ok(bytes) => bytes,
        Err(error) => return create_response_vector(true, format!("Failed to decode recovery file: {}", error)),
    };
    let keypair = match PubkyClient::decrypt_recovery_file(&recovery_file_bytes, &passphrase) {
        Ok(keypair) => keypair,
        Err(error) => return create_response_vector(true, format!("Failed to decrypt recovery file: {}", error)),
    };
    let secret_key = get_secret_key_from_keypair(&keypair);
    create_response_vector(false, secret_key)
}
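A hedged sketch of the round trip through the two recovery-file functions above; the passphrase is a placeholder, and the assertions only restate that decryption should recover the original hex-encoded key:

```rust
// Sketch: create an encrypted recovery file, then decrypt it back to the
// same hex-encoded secret key. The passphrase is an illustrative placeholder.
fn example_recovery_round_trip(secret_key_hex: String) {
    let passphrase = "illustrative passphrase".to_string();

    let created = create_recovery_file(secret_key_hex.clone(), passphrase.clone());
    assert_eq!(created[0], "success");

    let decrypted = decrypt_recovery_file(created[1].clone(), passphrase);
    assert_eq!(decrypted[0], "success");
    assert_eq!(decrypted[1], secret_key_hex);
}
```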
|
|
||||||
@@ -1,14 +0,0 @@
use serde::Serialize;

#[derive(Debug, Serialize)]
pub struct Capability {
    pub path: String,
    pub permission: String,
}

#[derive(Debug, Serialize)]
pub struct PubkyAuthDetails {
    pub relay: String,
    pub capabilities: Vec<Capability>,
    pub secret: String,
}
@@ -1,244 +0,0 @@
|
|||||||
use std::error::Error;
|
|
||||||
use std::net::{Ipv4Addr, Ipv6Addr};
|
|
||||||
use serde_json::json;
|
|
||||||
use base64::{engine::general_purpose, Engine};
|
|
||||||
use pkarr::dns::rdata::RData;
|
|
||||||
use pkarr::dns::ResourceRecord;
|
|
||||||
|
|
||||||
pub fn create_response_vector(error: bool, data: String) -> Vec<String> {
|
|
||||||
if error {
|
|
||||||
vec!["error".to_string(), data]
|
|
||||||
} else {
|
|
||||||
vec!["success".to_string(), data]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract_rdata_for_json(record: &ResourceRecord) -> serde_json::Value {
|
|
||||||
match &record.rdata {
|
|
||||||
RData::TXT(txt) => {
|
|
||||||
let attributes = txt.attributes();
|
|
||||||
let strings: Vec<String> = attributes.into_iter()
|
|
||||||
.map(|(key, value)| {
|
|
||||||
match value {
|
|
||||||
Some(v) => format!("{}={}", key, v),
|
|
||||||
None => key,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
json!({
|
|
||||||
"type": "TXT",
|
|
||||||
"strings": strings
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::A(a) => {
|
|
||||||
let ipv4 = Ipv4Addr::from(a.address);
|
|
||||||
json!({
|
|
||||||
"type": "A",
|
|
||||||
"address": ipv4.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::AAAA(aaaa) => {
|
|
||||||
let ipv6 = Ipv6Addr::from(aaaa.address);
|
|
||||||
json!({
|
|
||||||
"type": "AAAA",
|
|
||||||
"address": ipv6.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::AFSDB(afsdb) => {
|
|
||||||
json!({
|
|
||||||
"type": "AFSDB",
|
|
||||||
"subtype": afsdb.subtype,
|
|
||||||
"hostname": afsdb.hostname.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::CAA(caa) => {
|
|
||||||
json!({
|
|
||||||
"type": "CAA",
|
|
||||||
"flag": caa.flag,
|
|
||||||
"tag": caa.tag.to_string(),
|
|
||||||
"value": caa.value.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::HINFO(hinfo) => {
|
|
||||||
json!({
|
|
||||||
"type": "HINFO",
|
|
||||||
"cpu": hinfo.cpu.to_string(),
|
|
||||||
"os": hinfo.os.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::ISDN(isdn) => {
|
|
||||||
json!({
|
|
||||||
"type": "ISDN",
|
|
||||||
"address": isdn.address.to_string(),
|
|
||||||
"sa": isdn.sa.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::LOC(loc) => {
|
|
||||||
json!({
|
|
||||||
"type": "LOC",
|
|
||||||
"version": loc.version,
|
|
||||||
"size": loc.size,
|
|
||||||
"horizontal_precision": loc.horizontal_precision,
|
|
||||||
"vertical_precision": loc.vertical_precision,
|
|
||||||
"latitude": loc.latitude,
|
|
||||||
"longitude": loc.longitude,
|
|
||||||
"altitude": loc.altitude
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::MINFO(minfo) => {
|
|
||||||
json!({
|
|
||||||
"type": "MINFO",
|
|
||||||
"rmailbox": minfo.rmailbox.to_string(),
|
|
||||||
"emailbox": minfo.emailbox.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::MX(mx) => {
|
|
||||||
json!({
|
|
||||||
"type": "MX",
|
|
||||||
"preference": mx.preference,
|
|
||||||
"exchange": mx.exchange.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::NAPTR(naptr) => {
|
|
||||||
json!({
|
|
||||||
"type": "NAPTR",
|
|
||||||
"order": naptr.order,
|
|
||||||
"preference": naptr.preference,
|
|
||||||
"flags": naptr.flags.to_string(),
|
|
||||||
"services": naptr.services.to_string(),
|
|
||||||
"regexp": naptr.regexp.to_string(),
|
|
||||||
"replacement": naptr.replacement.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::NULL(_, null_record) => {
|
|
||||||
json!({
|
|
||||||
"type": "NULL",
|
|
||||||
"data": base64::encode(null_record.get_data())
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::OPT(opt) => {
|
|
||||||
json!({
|
|
||||||
"type": "OPT",
|
|
||||||
"udp_packet_size": opt.udp_packet_size,
|
|
||||||
"version": opt.version,
|
|
||||||
"opt_codes": opt.opt_codes.iter().map(|code| {
|
|
||||||
json!({
|
|
||||||
"code": code.code,
|
|
||||||
"data": base64::encode(&code.data)
|
|
||||||
})
|
|
||||||
}).collect::<Vec<_>>()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::RouteThrough(rt) => {
|
|
||||||
json!({
|
|
||||||
"type": "RT",
|
|
||||||
"preference": rt.preference,
|
|
||||||
"intermediate_host": rt.intermediate_host.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::RP(rp) => {
|
|
||||||
json!({
|
|
||||||
"type": "RP",
|
|
||||||
"mbox": rp.mbox.to_string(),
|
|
||||||
"txt": rp.txt.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::SOA(soa) => {
|
|
||||||
json!({
|
|
||||||
"type": "SOA",
|
|
||||||
"mname": soa.mname.to_string(),
|
|
||||||
"rname": soa.rname.to_string(),
|
|
||||||
"serial": soa.serial,
|
|
||||||
"refresh": soa.refresh,
|
|
||||||
"retry": soa.retry,
|
|
||||||
"expire": soa.expire,
|
|
||||||
"minimum": soa.minimum
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::SRV(srv) => {
|
|
||||||
json!({
|
|
||||||
"type": "SRV",
|
|
||||||
"priority": srv.priority,
|
|
||||||
"weight": srv.weight,
|
|
||||||
"port": srv.port,
|
|
||||||
"target": srv.target.to_string()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::SVCB(svcb) => {
|
|
||||||
let mut params = serde_json::Map::new();
|
|
||||||
for (key, value) in svcb.iter_params() {
|
|
||||||
params.insert(key.to_string(), json!(base64::encode(value)));
|
|
||||||
}
|
|
||||||
json!({
|
|
||||||
"type": "SVCB",
|
|
||||||
"priority": svcb.priority,
|
|
||||||
"target": svcb.target.to_string(),
|
|
||||||
"params": params
|
|
||||||
})
|
|
||||||
},
|
|
||||||
RData::WKS(wks) => {
|
|
||||||
json!({
|
|
||||||
"type": "WKS",
|
|
||||||
"address": Ipv4Addr::from(wks.address).to_string(),
|
|
||||||
"protocol": wks.protocol,
|
|
||||||
"bit_map": base64::encode(&wks.bit_map)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
|
|
||||||
_ => json!({
|
|
||||||
"type": format!("{:?}", record.rdata.type_code()),
|
|
||||||
"data": "Unhandled record type"
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resource_record_to_json(record: &ResourceRecord) -> Result<serde_json::Value, Box<dyn Error>> {
|
|
||||||
Ok(json!({
|
|
||||||
"name": record.name.to_string(),
|
|
||||||
"class": format!("{:?}", record.class),
|
|
||||||
"ttl": record.ttl,
|
|
||||||
"rdata": extract_rdata_for_json(record),
|
|
||||||
"cache_flush": record.cache_flush
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn construct_pubky_url(public_key: &str, domain: &str, path_segments: &[&str]) -> String {
    // Construct the base URL
    let mut url = format!("pubky://{}/pub/{}", public_key, domain);

    // Append each path segment, separated by '/'
    for segment in path_segments {
        if !segment.is_empty() {
            url.push('/');
            url.push_str(segment);
        }
    }

    // Remove trailing slash if present
    if url.ends_with('/') {
        url.pop();
    }

    url
}
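For illustration, a small test-style sketch of what `construct_pubky_url` produces; the public key string is a placeholder, not a real z-base-32 key:

```rust
#[test]
fn construct_pubky_url_skips_empty_segments() {
    // "placeholderpublickey" stands in for a real z-base-32 public key.
    let url = construct_pubky_url("placeholderpublickey", "example.com", &["posts", "", "1"]);
    assert_eq!(url, "pubky://placeholderpublickey/pub/example.com/posts/1");
}
```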
|
|
||||||
|
|
||||||
/**
 * Extract everything up to the first instance of "pub/" in a Pubky URL
 *
 * # Arguments
 * * `full_url` - The full URL
 *
 * # Returns
 * * `Some(String)` - The URL truncated just after "pub/"
 * * `None` - If "pub/" is not found in the URL
 */
pub fn get_list_url(full_url: &str) -> Option<String> {
    if let Some(index) = full_url.find("pub/") {
        let end_index = index + "pub/".len();
        let substring = &full_url[..end_index];
        Some(substring.to_string())
    } else {
        // "pub/" not found in the string
        None
    }
}
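And a matching sketch for `get_list_url`, again with a placeholder public key:

```rust
#[test]
fn get_list_url_truncates_after_pub() {
    let full = "pubky://placeholderpublickey/pub/example.com/posts/1";
    assert_eq!(
        get_list_url(full),
        Some("pubky://placeholderpublickey/pub/".to_string())
    );
    // No "pub/" segment at all -> None.
    assert_eq!(get_list_url("https://example.com/other/path"), None);
}
```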
|
|
||||||
@@ -1,321 +0,0 @@
|
|||||||
use std::string::ToString;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use pkarr::{dns, Keypair, PublicKey, SignedPacket};
|
|
||||||
use pkarr::bytes::Bytes;
|
|
||||||
use pkarr::dns::rdata::RData;
|
|
||||||
use pubky::PubkyClient;
|
|
||||||
use url::Url;
|
|
||||||
use std::str;
|
|
||||||
|
|
||||||
static PUBKY_CLIENT: Lazy<Arc<PubkyClient>> = Lazy::new(|| {
|
|
||||||
// let custom_testnet = Testnet {
|
|
||||||
// bootstrap: vec!["http://localhost:6287".to_string()],
|
|
||||||
// nodes: vec![],
|
|
||||||
// };
|
|
||||||
//
|
|
||||||
// let client = PubkyClient::builder()
|
|
||||||
// .testnet(&custom_testnet)
|
|
||||||
// .build();
|
|
||||||
let client = PubkyClient::testnet();
|
|
||||||
|
|
||||||
Arc::new(client)
|
|
||||||
});
|
|
||||||
|
|
||||||
// static PUBKY_CLIENT: Lazy<Arc<PubkyClient>> = Lazy::new(|| {
|
|
||||||
// let custom_bootstrap = vec!["localhost:64630".to_string()];
|
|
||||||
//
|
|
||||||
// let mut pkarr_settings = Settings::default();
|
|
||||||
// pkarr_settings.dht.bootstrap = custom_bootstrap.clone().into();
|
|
||||||
// pkarr_settings.resolvers = custom_bootstrap
|
|
||||||
// .iter()
|
|
||||||
// .flat_map(|resolver| resolver.to_socket_addrs())
|
|
||||||
// .flatten()
|
|
||||||
// .collect::<Vec<_>>()
|
|
||||||
// .into();
|
|
||||||
//
|
|
||||||
// let client = PubkyClient::builder()
|
|
||||||
// .pkarr_settings(pkarr_settings)
|
|
||||||
// .build();
|
|
||||||
//
|
|
||||||
// Arc::new(client)
|
|
||||||
// });
|
|
||||||
|
|
||||||
const HOMESERVER: &str = "pubky://8pinxxgqs41n4aididenw5apqp1urfmzdztr8jt4abrkdn435ewo";
|
|
||||||
const SECRET_KEY: &str = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() {
|
|
||||||
let sign_in_res = signin_or_signup(SECRET_KEY, HOMESERVER).await;
|
|
||||||
println!("Sign In/Up Response: {:?}", sign_in_res);
|
|
||||||
// let res = publish("recordname".to_string(), "recordcontent".to_string(), "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855".to_string()).await;
|
|
||||||
// // println!("{:?}", res);
|
|
||||||
let public_key = &sign_in_res[1];
|
|
||||||
let url = construct_pubky_url(public_key, "mydomain.com", &[]);
|
|
||||||
let put_res = put(&url, &"newcontent".to_string()).await;
|
|
||||||
println!("Put Response: {:?}", put_res);
|
|
||||||
let get_res = get(&url).await;
|
|
||||||
println!("Get Response: {:?}", get_res);
|
|
||||||
let list_res = list(url).await;
|
|
||||||
println!("List Response: {:?}", list_res);
|
|
||||||
let create_response = create_recovery_file(&SECRET_KEY, "password");
|
|
||||||
println!("Create Response: {:?}", create_response);
|
|
||||||
let recovery_file = create_response[1].clone();
|
|
||||||
let decrypt_response = decrypt_recovery_file(&recovery_file, "password");
|
|
||||||
println!("Decrypt Response: {:?}", decrypt_response);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_recovery_file(secret_key: &str, passphrase: &str) -> Vec<String> {
|
|
||||||
if secret_key.is_empty() || passphrase.is_empty() {
|
|
||||||
return create_response_vector(true, "Secret key and passphrase must not be empty".to_string());
|
|
||||||
}
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
let recovery_file_bytes = match PubkyClient::create_recovery_file(&keypair, &passphrase) {
|
|
||||||
Ok(bytes) => bytes,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to create recovery file".to_string()),
|
|
||||||
};
|
|
||||||
let recovery_file = base64::encode(&recovery_file_bytes);
|
|
||||||
create_response_vector(false, recovery_file)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decrypt_recovery_file(recovery_file: &str, passphrase: &str) -> Vec<String> {
|
|
||||||
if recovery_file.is_empty() || passphrase.is_empty() {
|
|
||||||
return create_response_vector(true, "Recovery file and passphrase must not be empty".to_string());
|
|
||||||
}
|
|
||||||
let recovery_file_bytes = match base64::decode(&recovery_file) {
|
|
||||||
Ok(bytes) => bytes,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to decode recovery file: {}", error)),
|
|
||||||
};
|
|
||||||
let keypair = match PubkyClient::decrypt_recovery_file(&recovery_file_bytes, &passphrase) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, "Failed to decrypt recovery file".to_string()),
|
|
||||||
};
|
|
||||||
let secret_key = get_secret_key_from_keypair(&keypair);
|
|
||||||
create_response_vector(false, secret_key)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
pub async fn signin_or_signup(secret_key: &str, homeserver: &str) -> Vec<String> {
|
|
||||||
let sign_in_res = sign_in(secret_key).await;
|
|
||||||
if sign_in_res[0] == "success" {
|
|
||||||
return sign_in_res;
|
|
||||||
}
|
|
||||||
let sign_up_res = sign_up(secret_key, homeserver).await;
|
|
||||||
sign_up_res
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn sign_up(secret_key: &str, homeserver: &str) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
let homeserver_public_key = match PublicKey::try_from(homeserver) {
|
|
||||||
Ok(key) => key,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Invalid homeserver public key: {}", error)),
|
|
||||||
};
|
|
||||||
|
|
||||||
match client.signup(&keypair, &homeserver_public_key).await {
|
|
||||||
Ok(session) => create_response_vector(false, session.pubky().to_string()),
|
|
||||||
Err(error) => create_response_vector(true, format!("signup failure: {}", error)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn sign_in(secret_key: &str) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
match client.signin(&keypair).await {
|
|
||||||
Ok(session) => {
|
|
||||||
create_response_vector(false, session.pubky().to_string())
|
|
||||||
},
|
|
||||||
Err(error) => {
|
|
||||||
create_response_vector(true, format!("Failed to sign in: {}", error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn publish(record_name: String, record_content: String, secret_key: String) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
|
|
||||||
let keypair = match get_keypair_from_secret_key(&secret_key) {
|
|
||||||
Ok(keypair) => keypair,
|
|
||||||
Err(error) => return create_response_vector(true, error),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut packet = dns::Packet::new_reply(0);
|
|
||||||
|
|
||||||
let dns_name = match dns::Name::new(&record_name) {
|
|
||||||
Ok(name) => name,
|
|
||||||
Err(e) => return create_response_vector(true, format!("Failed to create DNS name: {}", e)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let record_content_str: &str = record_content.as_str();
|
|
||||||
|
|
||||||
let txt_record = match record_content_str.try_into() {
|
|
||||||
Ok(value) => RData::TXT(value),
|
|
||||||
Err(e) => {
|
|
||||||
return create_response_vector(true, format!("Failed to convert string to TXT record: {}", e))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
packet.answers.push(dns::ResourceRecord::new(
|
|
||||||
dns_name,
|
|
||||||
dns::CLASS::IN,
|
|
||||||
30,
|
|
||||||
txt_record,
|
|
||||||
));
|
|
||||||
|
|
||||||
match SignedPacket::from_packet(&keypair, &packet) {
|
|
||||||
Ok(signed_packet) => {
|
|
||||||
match client.pkarr().publish(&signed_packet).await {
|
|
||||||
Ok(()) => {
|
|
||||||
create_response_vector(false, keypair.public_key().to_string())
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
create_response_vector(true, format!("Failed to publish: {}", e))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
create_response_vector(true, format!("Failed to create signed packet: {}", e))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_keypair_from_secret_key(secret_key: &str) -> Result<Keypair, String> {
|
|
||||||
let bytes = match hex::decode(&secret_key) {
|
|
||||||
Ok(bytes) => bytes,
|
|
||||||
Err(_) => return Err("Failed to decode secret key".to_string())
|
|
||||||
};
|
|
||||||
|
|
||||||
let secret_key_bytes: [u8; 32] = match bytes.try_into() {
|
|
||||||
Ok(secret_key) => secret_key,
|
|
||||||
Err(_) => {
|
|
||||||
return Err("Failed to convert secret key to 32-byte array".to_string());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Keypair::from_secret_key(&secret_key_bytes))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn create_response_vector(error: bool, data: String) -> Vec<String> {
|
|
||||||
if error {
|
|
||||||
vec!["error".to_string(), data]
|
|
||||||
} else {
|
|
||||||
vec!["success".to_string(), data]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn put(url: &String, content: &String) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
match client.put(parsed_url, &content.as_bytes()).await {
|
|
||||||
Ok(_) => create_response_vector(false, trimmed_url.to_string()),
|
|
||||||
Err(error) => {
|
|
||||||
create_response_vector(true, format!("Failed to put: {}", error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get(url: &String) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
|
|
||||||
// Parse the URL and return error early if it fails
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Perform the request and return error early if no data is returned
|
|
||||||
let result: Option<Bytes> = match client.get(parsed_url).await {
|
|
||||||
Ok(res) => res,
|
|
||||||
Err(_) => return create_response_vector(true, "Request failed".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
// If there are bytes, attempt to convert to UTF-8
|
|
||||||
let bytes = match result {
|
|
||||||
Some(bytes) => bytes,
|
|
||||||
None => return create_response_vector(true, "No data returned".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Try to convert bytes to string and return error if it fails
|
|
||||||
let string = match str::from_utf8(&bytes) {
|
|
||||||
Ok(s) => s.to_string(),
|
|
||||||
Err(_) => return create_response_vector(true, "Invalid UTF-8 sequence".to_string()),
|
|
||||||
};
|
|
||||||
|
|
||||||
// If everything is successful, return the formatted response
|
|
||||||
create_response_vector(false, string)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn list(url: String) -> Vec<String> {
|
|
||||||
let client = PUBKY_CLIENT.clone();
|
|
||||||
let trimmed_url = url.trim_end_matches('/');
|
|
||||||
let parsed_url = match Url::parse(&trimmed_url) {
|
|
||||||
Ok(url) => url,
|
|
||||||
Err(_) => return create_response_vector(true, "Failed to parse URL".to_string()),
|
|
||||||
};
|
|
||||||
let list_builder = match client.list(parsed_url) {
|
|
||||||
Ok(list) => list,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to list: {}", error)),
|
|
||||||
};
|
|
||||||
// Execute the non-Send part synchronously
|
|
||||||
let send_future = list_builder.send();
|
|
||||||
let send_res = match send_future.await {
|
|
||||||
Ok(res) => res,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to send list request: {}", error))
|
|
||||||
};
|
|
||||||
let json_string = match serde_json::to_string(&send_res) {
|
|
||||||
Ok(json) => json,
|
|
||||||
Err(error) => return create_response_vector(true, format!("Failed to serialize JSON: {}", error)),
|
|
||||||
};
|
|
||||||
create_response_vector(false, json_string)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn construct_pubky_url(public_key: &str, domain: &str, path_segments: &[&str]) -> String {
|
|
||||||
// Construct the base URL
|
|
||||||
let mut url = format!("pubky://{}/pub/{}", public_key, domain);
|
|
||||||
|
|
||||||
// Append each path segment, separated by '/'
|
|
||||||
for segment in path_segments {
|
|
||||||
if !segment.is_empty() {
|
|
||||||
url.push('/');
|
|
||||||
url.push_str(segment);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove trailing slash if present
|
|
||||||
if url.ends_with('/') {
|
|
||||||
url.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
url
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_list_url(full_url: &str) -> Option<String> {
|
|
||||||
if let Some(index) = full_url.find("pub/") {
|
|
||||||
// Add length of "pub/" to include it in the substring
|
|
||||||
let end_index = index + "pub/".len();
|
|
||||||
let substring = &full_url[..end_index];
|
|
||||||
Some(substring.to_string())
|
|
||||||
} else {
|
|
||||||
// "pub/" not found in the string
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_secret_key_from_keypair(keypair: &Keypair) -> String {
    hex::encode(keypair.secret_key())
}
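A small sketch tying the two keypair helpers together, assuming `Keypair::secret_key` returns the same 32 bytes that were passed to `from_secret_key`; the all-zero hex string is purely a placeholder that happens to decode to 32 bytes:

```rust
#[test]
fn secret_key_round_trips_through_keypair() {
    // 64 hex characters (32 zero bytes) used purely as a placeholder seed.
    let secret_key_hex = "00".repeat(32);
    let keypair =
        get_keypair_from_secret_key(&secret_key_hex).expect("32-byte hex key should parse");
    assert_eq!(get_secret_key_from_keypair(&keypair), secret_key_hex);
}
```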
|
|
||||||
44
setup-local-android-bindings.js
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
const fs = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const ktPath = 'rust/bindings/android/pubkymobile.kt';
|
||||||
|
const ktDestinationPath = 'android/src/main/java/uniffi/pubkymobile/';
|
||||||
|
const jniPath = 'rust/bindings/android/jniLibs';
|
||||||
|
const jniDestinationPath = 'android/src/main/jniLibs/';
|
||||||
|
|
||||||
|
async function runSetup() {
|
||||||
|
try {
|
||||||
|
console.log('Removing existing files...');
|
||||||
|
// Remove destination directories if they exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.rm(ktDestinationPath, { recursive: true, force: true }),
|
||||||
|
fs.rm(jniDestinationPath, { recursive: true, force: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('Creating directories...');
|
||||||
|
// Create destination directories if they don't exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.mkdir(ktDestinationPath, { recursive: true }),
|
||||||
|
fs.mkdir(jniDestinationPath, { recursive: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('Copying Kotlin file...');
|
||||||
|
// Copy Kotlin file to destination
|
||||||
|
const ktTargetPath = path.join(ktDestinationPath, 'pubkymobile.kt');
|
||||||
|
await fs.copyFile(ktPath, ktTargetPath);
|
||||||
|
|
||||||
|
console.log('Copying JNI libraries...');
|
||||||
|
// Copy JNI libraries directory
|
||||||
|
await fs.cp(jniPath, jniDestinationPath, { recursive: true });
|
||||||
|
|
||||||
|
console.log('Android files copied successfully!');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during setup:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
runSetup().catch((error) => {
|
||||||
|
console.error('Unhandled error:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
49
setup-local-ios-bindings.js
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
const fs = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const frameworkPath = 'rust/bindings/ios/PubkyMobile.xcframework';
|
||||||
|
const frameworkDestinationPath = 'ios/Frameworks';
|
||||||
|
const swiftFilePath = 'rust/bindings/ios/pubkymobile.swift';
|
||||||
|
const swiftDestinationPath = 'ios/';
|
||||||
|
|
||||||
|
async function runSetup() {
|
||||||
|
try {
|
||||||
|
console.log('Removing existing files...');
|
||||||
|
// Remove destination directories if they exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.rm(frameworkDestinationPath, { recursive: true, force: true }),
|
||||||
|
fs.rm('ios/pubkymobile.swift', { recursive: true, force: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('Creating directories...');
|
||||||
|
// Create destination directories if they don't exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.mkdir(frameworkDestinationPath, { recursive: true }),
|
||||||
|
fs.mkdir(swiftDestinationPath, { recursive: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Copy framework to destination
|
||||||
|
const frameworkTargetPath = path.join(
|
||||||
|
frameworkDestinationPath,
|
||||||
|
path.basename(frameworkPath)
|
||||||
|
);
|
||||||
|
await fs.cp(frameworkPath, frameworkTargetPath, { recursive: true });
|
||||||
|
|
||||||
|
// Copy Swift file to destination
|
||||||
|
const swiftTargetPath = path.join(
|
||||||
|
swiftDestinationPath,
|
||||||
|
path.basename(swiftFilePath)
|
||||||
|
);
|
||||||
|
await fs.copyFile(swiftFilePath, swiftTargetPath);
|
||||||
|
|
||||||
|
console.log('Framework and Swift file copied successfully!');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during setup:', error);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
runSetup().catch((error) => {
|
||||||
|
console.error('Unhandled error:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
82
setup-remote-android-bindings.js
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
const fs = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
const simpleGit = require('simple-git');
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
const repoOwner = 'pubky';
|
||||||
|
const repoName = 'pubky-core-mobile-sdk';
|
||||||
|
const branch = 'main';
|
||||||
|
const ktPath = 'bindings/android/pubkymobile.kt';
|
||||||
|
const ktDestinationPath = 'android/src/main/java/uniffi/pubkymobile/';
|
||||||
|
const jniPath = 'bindings/android/jniLibs';
|
||||||
|
const jniDestinationPath = 'android/src/main/jniLibs/';
|
||||||
|
const tempDir = 'temp';
|
||||||
|
|
||||||
|
async function runSetup() {
|
||||||
|
try {
|
||||||
|
console.log('Removing existing files...');
|
||||||
|
// Remove destination directories if they exist & Clean up any lingering temporary directory
|
||||||
|
await Promise.all([
|
||||||
|
fs.rm(ktDestinationPath, { recursive: true, force: true }),
|
||||||
|
fs.rm(jniDestinationPath, { recursive: true, force: true }),
|
||||||
|
fs.rm(tempDir, { recursive: true, force: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('Creating directories...');
|
||||||
|
// Create destination directories if they don't exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.mkdir(ktDestinationPath, { recursive: true }),
|
||||||
|
fs.mkdir(jniDestinationPath, { recursive: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Initialize Git
|
||||||
|
const git = simpleGit();
|
||||||
|
|
||||||
|
console.log('Cloning repository...');
|
||||||
|
// Clone the repository sparsely
|
||||||
|
await git.clone(
|
||||||
|
`https://github.com/${repoOwner}/${repoName}.git`,
|
||||||
|
tempDir,
|
||||||
|
['--depth', '1', '--filter=blob:none', '--sparse', `--branch=${branch}`]
|
||||||
|
);
|
||||||
|
|
||||||
|
// Change directory to the cloned repository
|
||||||
|
const tempGit = simpleGit(tempDir);
|
||||||
|
|
||||||
|
console.log('Setting up sparse checkout...');
|
||||||
|
// Set sparse-checkout to include only the required directory
|
||||||
|
await tempGit.raw(['sparse-checkout', 'set', 'bindings/android']);
|
||||||
|
|
||||||
|
console.log('Copying Kotlin file...');
|
||||||
|
// Copy Kotlin file to destination
|
||||||
|
const ktSourcePath = path.join(tempDir, ktPath);
|
||||||
|
const ktTargetPath = path.join(ktDestinationPath, 'pubkymobile.kt');
|
||||||
|
await fs.copyFile(ktSourcePath, ktTargetPath);
|
||||||
|
|
||||||
|
console.log('Copying JNI libraries...');
|
||||||
|
// Copy JNI libraries directory
|
||||||
|
const jniSourcePath = path.join(tempDir, jniPath);
|
||||||
|
const jniTargetPath = jniDestinationPath;
|
||||||
|
await fs.cp(jniSourcePath, jniTargetPath, { recursive: true });
|
||||||
|
|
||||||
|
console.log('Cleaning up...');
|
||||||
|
// Clean up temporary directory
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
|
||||||
|
console.log('Android files downloaded and copied successfully!');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during setup:', error);
|
||||||
|
// Try to clean up temp directory if it exists
|
||||||
|
try {
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
} catch (cleanupError) {
|
||||||
|
console.error('Failed to clean up temporary directory:', cleanupError);
|
||||||
|
}
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
runSetup().catch((error) => {
|
||||||
|
console.error('Unhandled error:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
83
setup-remote-ios-bindings.js
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
const fs = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
const simpleGit = require('simple-git');
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
const repoOwner = 'pubky';
|
||||||
|
const repoName = 'pubky-core-mobile-sdk';
|
||||||
|
const branch = 'main';
|
||||||
|
const frameworkPath = 'bindings/ios/PubkyMobile.xcframework';
|
||||||
|
const frameworkDestinationPath = 'ios/Frameworks';
|
||||||
|
const swiftFilePath = 'bindings/ios/pubkymobile.swift';
|
||||||
|
const swiftDestinationPath = 'ios/';
|
||||||
|
const tempDir = 'temp';
|
||||||
|
|
||||||
|
async function runSetup() {
|
||||||
|
try {
|
||||||
|
console.log('Removing existing files...');
|
||||||
|
// Remove destination directories if they exist & Clean up any lingering temporary directory
|
||||||
|
await Promise.all([
|
||||||
|
fs.rm(frameworkDestinationPath, { recursive: true, force: true }),
|
||||||
|
fs.rm('ios/pubkymobile.swift', { recursive: true, force: true }),
|
||||||
|
fs.rm(tempDir, { recursive: true, force: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log('Creating directories...');
|
||||||
|
// Create destination directories if they don't exist
|
||||||
|
await Promise.all([
|
||||||
|
fs.mkdir(frameworkDestinationPath, { recursive: true }),
|
||||||
|
fs.mkdir(swiftDestinationPath, { recursive: true }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Initialize Git
|
||||||
|
const git = simpleGit();
|
||||||
|
|
||||||
|
// Clone the repository sparsely
|
||||||
|
await git.clone(
|
||||||
|
`https://github.com/${repoOwner}/${repoName}.git`,
|
||||||
|
tempDir,
|
||||||
|
['--depth', '1', '--filter=blob:none', '--sparse', `--branch=${branch}`]
|
||||||
|
);
|
||||||
|
|
||||||
|
// Change directory to the cloned repository
|
||||||
|
const tempGit = simpleGit(tempDir);
|
||||||
|
|
||||||
|
// Set sparse-checkout to include only the required directory
|
||||||
|
await tempGit.raw(['sparse-checkout', 'set', 'bindings/ios']);
|
||||||
|
|
||||||
|
// Copy framework to destination
|
||||||
|
const frameworkSourcePath = path.join(tempDir, frameworkPath);
|
||||||
|
const frameworkTargetPath = path.join(
|
||||||
|
frameworkDestinationPath,
|
||||||
|
path.basename(frameworkPath)
|
||||||
|
);
|
||||||
|
await fs.cp(frameworkSourcePath, frameworkTargetPath, { recursive: true });
|
||||||
|
|
||||||
|
// Copy Swift file to destination
|
||||||
|
const swiftSourcePath = path.join(tempDir, swiftFilePath);
|
||||||
|
const swiftTargetPath = path.join(
|
||||||
|
swiftDestinationPath,
|
||||||
|
path.basename(swiftFilePath)
|
||||||
|
);
|
||||||
|
await fs.copyFile(swiftSourcePath, swiftTargetPath);
|
||||||
|
|
||||||
|
// Clean up temporary directory
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
|
||||||
|
console.log('Framework and Swift file downloaded and copied successfully!');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during setup:', error);
|
||||||
|
// Try to clean up temp directory if it exists
|
||||||
|
try {
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
} catch (cleanupError) {
|
||||||
|
console.error('Failed to clean up temporary directory:', cleanupError);
|
||||||
|
}
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
runSetup().catch((error) => {
|
||||||
|
console.error('Unhandled error:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
62
setup-rust.js
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
const simpleGit = require('simple-git');
|
||||||
|
const fs = require('fs').promises;
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const rustDir = 'rust';
|
||||||
|
const repoOwner = 'pubky';
|
||||||
|
const repoName = 'pubky-core-mobile-sdk';
|
||||||
|
const branch = 'main';
|
||||||
|
const tempDir = 'temp';
|
||||||
|
|
||||||
|
async function setupRustDirectory() {
|
||||||
|
try {
|
||||||
|
// Check if rust directory exists
|
||||||
|
const rustExists = await fs
|
||||||
|
.access(rustDir)
|
||||||
|
.then(() => true)
|
||||||
|
.catch(() => false);
|
||||||
|
|
||||||
|
if (!rustExists) {
|
||||||
|
console.log('Creating rust directory...');
|
||||||
|
await fs.mkdir(rustDir);
|
||||||
|
|
||||||
|
// Clone the repository directly into rust directory
|
||||||
|
const git = simpleGit();
|
||||||
|
await git.clone(
|
||||||
|
`https://github.com/${repoOwner}/${repoName}.git`,
|
||||||
|
tempDir,
|
||||||
|
['--depth', '1', `--branch=${branch}`]
|
||||||
|
);
|
||||||
|
|
||||||
|
// Move all contents from temp directory to rust directory
|
||||||
|
const tempContents = await fs.readdir(path.join(tempDir));
|
||||||
|
await Promise.all(
|
||||||
|
tempContents.map(async (item) => {
|
||||||
|
if (item !== '.git') {
|
||||||
|
// Skip .git directory
|
||||||
|
const source = path.join(tempDir, item);
|
||||||
|
const dest = path.join(rustDir, item);
|
||||||
|
await fs.cp(source, dest, { recursive: true });
|
||||||
|
}
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
// Clean up temp directory
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
console.log('Rust directory setup completed successfully!');
|
||||||
|
} else {
|
||||||
|
console.log('Rust directory already exists, skipping setup...');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during rust directory setup:', error);
|
||||||
|
// Try to clean up temp directory if it exists
|
||||||
|
try {
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true });
|
||||||
|
} catch (cleanupError) {
|
||||||
|
console.error('Failed to clean up temporary directory:', cleanupError);
|
||||||
|
}
|
||||||
|
throw error; // Re-throw to be handled by the main try-catch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
setupRustDirectory();
|
||||||
Some files were not shown because too many files have changed in this diff.