Initial commit

Tony Klink 2024-01-12 09:35:31 -06:00
commit 9fe412be11
Signed by: klink
GPG key ID: 85175567C4D19231
58 changed files with 6215 additions and 0 deletions

src/bussy/mod.rs Normal file

@@ -0,0 +1,143 @@
use crate::{
noose::user::{User, UserRow},
utils::{error::Error, structs::Subscription},
};
use nostr::secp256k1::XOnlyPublicKey;
use std::{collections::HashMap, fmt::Debug};
use tokio::sync::{broadcast, Mutex};
pub mod channels {
pub static MSG_NOOSE: &str = "MSG_NOOSE";
pub static MSG_NIP05: &str = "MSG_NIP05";
pub static MSG_RELAY: &str = "MSG_RELAY";
pub static MSG_PIPELINE: &str = "MSG_PIPELINE";
}
#[derive(Debug, Clone, PartialEq)]
pub enum Command {
// DbRequest
DbReqWriteEvent(Box<nostr::Event>),
DbReqFindEvent(/* client_id*/ uuid::Uuid, Subscription),
DbReqDeleteEvents(/* event ids */ Vec<String>),
// Old messages
DbReqInsertUser(UserRow),
DbReqGetUser(User),
DbReqCreateAccount(XOnlyPublicKey, String, String),
DbReqGetAccount(String),
DbReqClear,
// DbResponse
DbResRelayMessage(
/* client_id*/ uuid::Uuid,
/* Vec<RelayMessage::Event> */ Vec<String>,
),
DbResInfo,
DbResOk,
DbResOkWithStatus(String),
DbResAccount, // TODO: Add Account DTO as a param
DbResUser(UserRow),
// Event Pipeline
PipelineReqEvent(/* client_id */ uuid::Uuid, Box<nostr::Event>),
PipelineResRelayMessageOk(/* client_id */ uuid::Uuid, nostr::RelayMessage),
PipelineResStreamOutEvent(Box<nostr::Event>),
PipelineResOk,
// Other
Str(String),
ServiceError(Error),
Noop,
}
#[derive(Debug, Clone)]
pub struct Message {
pub source: &'static str,
pub content: Command,
}
#[derive(Debug)]
pub struct PubSub {
subscribers: Mutex<HashMap<String, Vec<broadcast::Sender<Message>>>>,
}
impl Default for PubSub {
fn default() -> Self {
panic!("Use PubSub::new() to initialize PubSub");
}
}
impl PubSub {
pub fn new() -> Self {
PubSub {
subscribers: Mutex::new(HashMap::new()),
}
}
pub async fn subscribe(&self, topic: &str) -> broadcast::Receiver<Message> {
let (tx, _rx) = broadcast::channel(32); // 32 is the channel capacity
let mut subscribers = self.subscribers.lock().await;
subscribers
.entry(topic.to_string())
.or_insert_with(Vec::new)
.push(tx.clone());
tx.subscribe()
}
pub async fn publish(&self, topic: &str, message: Message) {
let mut subscribers = self.subscribers.lock().await;
if let Some(queue) = subscribers.get_mut(topic) {
for sender in queue.iter() {
sender.send(message.clone()).ok();
}
}
}
}
// #[cfg(test)]
// mod tests {
// use super::channels;
// use crate::bussy::{Command, Message, PubSub};
// use std::sync::Arc;
// #[tokio::test]
// async fn create_bus() {
// let pubsub = Arc::new(PubSub::new());
// let mut subscriber1 = pubsub.subscribe(channels::MSG_NIP05).await;
// let mut subscriber2 = pubsub.subscribe(channels::MSG_NOOSE).await;
// tokio::spawn(async move {
// while let Ok(message) = subscriber1.recv().await {
// println!("Subscriber1 received: {:?}", message);
// }
// });
// tokio::spawn(async move {
// while let Ok(message) = subscriber2.recv().await {
// println!("Subscriber2 received: {:?}", message);
// }
// });
// pubsub
// .publish(
// channels::MSG_NIP05,
// Message {
// source: "test",
// content: Command::Str("Hello S1".to_string()),
// },
// )
// .await;
// pubsub
// .publish(
// channels::MSG_NOOSE,
// Message {
// source: "test",
// content: Command::Str("Hello S2".to_string()),
// },
// )
// .await;
// dbg!(pubsub);
// // Sleep to keep the main thread alive while messages are processed in the background.
// tokio::time::sleep(std::time::Duration::from_secs(1)).await;
// }
// }
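For orientation, a minimal usage sketch of the bus (hypothetical, assuming it lives in this module next to the types above): subscribe to a topic first, then publish to it, and the broadcast receiver yields the message.

async fn demo(pubsub: &PubSub) {
    // Subscribe before publishing; broadcast messages sent while a topic
    // has no receivers are dropped.
    let mut rx = pubsub.subscribe(channels::MSG_RELAY).await;
    pubsub
        .publish(
            channels::MSG_RELAY,
            Message {
                source: channels::MSG_NOOSE,
                content: Command::Str("hello".to_string()),
            },
        )
        .await;
    if let Ok(message) = rx.recv().await {
        println!("received: {:?}", message);
    }
}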

src/main.rs Normal file

@@ -0,0 +1,51 @@
#![allow(dead_code)]
#![allow(unused_variables)]
use std::thread;
pub mod bussy;
mod noose;
mod relay;
mod usernames;
pub mod utils;
use bussy::PubSub;
use flexi_logger::Logger;
use crate::utils::structs::Context;
#[macro_use]
extern crate lazy_static;
fn main() {
// Logger init
let _logger = Logger::try_with_env_or_str("info")
.unwrap()
.log_to_stdout()
.duplicate_to_stderr(flexi_logger::Duplicate::Warn)
.write_mode(flexi_logger::WriteMode::Async)
.start()
.unwrap();
// Services
let ctx_db = Context::new();
let ctx_relay = ctx_db.clone();
let ctx_usernames = ctx_relay.clone();
let db_handle = thread::spawn(move || {
noose::start(ctx_db);
});
let relay_handle = thread::spawn(move || {
relay::start(ctx_relay);
});
let nip05_handle = thread::spawn(move || {
usernames::start(ctx_usernames);
});
db_handle.join().unwrap();
relay_handle.join().unwrap();
nip05_handle.join().unwrap();
println!("Done");
}

src/noose/db.rs Normal file

@@ -0,0 +1,18 @@
use crate::{
bussy::PubSub,
utils::{error::Error, structs::Subscription},
};
use async_trait::async_trait;
use nostr::Event;
use std::sync::Arc;
#[async_trait]
pub trait Noose: Send + Sync {
async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error>;
async fn migration_up(&self);
async fn write_event(&self, event: Box<Event>) -> Result<String, Error>;
async fn find_event(&self, subscription: Subscription) -> Result<Vec<String>, Error>;
}
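A no-op implementor sketch (hypothetical; the commit's real implementations are SledDb and SqliteDb) showing the minimum the trait expects, including the NIP-20 style OK string that write_event returns:

struct NullDb;

#[async_trait]
impl Noose for NullDb {
    async fn start(&mut self, _pubsub: Arc<PubSub>) -> Result<(), Error> {
        Ok(())
    }

    async fn migration_up(&self) {}

    async fn write_event(&self, event: Box<Event>) -> Result<String, Error> {
        // Acknowledge the event without persisting anything.
        Ok(format!("[\"OK\", \"{}\", true, \"\"]", event.id))
    }

    async fn find_event(&self, _subscription: Subscription) -> Result<Vec<String>, Error> {
        Ok(vec![])
    }
}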


@@ -0,0 +1,23 @@
CREATE TABLE events (
id TEXT PRIMARY KEY,
kind INTEGER NOT NULL,
pubkey TEXT NOT NULL,
content TEXT NOT NULL,
created_at INTEGER NOT NULL,
tags TEXT NOT NULL,
sig TEXT NOT NULL
);
CREATE INDEX idx_events_kind ON events (kind);
CREATE INDEX idx_events_pubkey ON events (pubkey);
CREATE TABLE tags (
tag TEXT NOT NULL,
value TEXT NOT NULL,
event_id TEXT REFERENCES events(id) ON DELETE CASCADE
);
CREATE INDEX idx_tags_tag ON tags (tag);
CREATE INDEX idx_tags_value ON tags (value);
CREATE INDEX idx_tags_event_id ON tags (event_id);


@@ -0,0 +1,5 @@
CREATE TABLE relays (
url TEXT PRIMARY KEY,
domain TEXT NOT NULL,
active BOOLEAN NOT NULL
);


@@ -0,0 +1 @@
CREATE VIRTUAL TABLE events_fts USING fts5(id, content);


@@ -0,0 +1,10 @@
CREATE TABLE users (
pubkey TEXT PRIMARY KEY,
username TEXT NOT NULL UNIQUE,
inserted_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
admin BOOLEAN DEFAULT false
);
CREATE INDEX idx_users_pubkey ON users (pubkey);
CREATE INDEX idx_users_username ON users (username);


@@ -0,0 +1,2 @@
PRAGMA foreign_keys = ON;
PRAGMA auto_vacuum = FULL;


@@ -0,0 +1,11 @@
CREATE TABLE unattached_media (
id TEXT PRIMARY KEY,
pubkey TEXT NOT NULL,
url TEXT NOT NULL,
data TEXT NOT NULL,
uploaded_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX unattached_media_id ON unattached_media (id);
CREATE INDEX unattached_media_pubkey ON unattached_media (pubkey);
CREATE INDEX unattached_media_url ON unattached_media (url);

src/noose/mod.rs Normal file

@@ -0,0 +1,33 @@
use crate::utils::structs::Context;
use tokio::runtime;
use db::Noose;
use pipeline::Pipeline;
pub mod db;
pub mod pipeline;
// mod sled;
mod sqlite;
pub mod user;
pub fn start(context: Context) {
let rt = runtime::Runtime::new().unwrap();
rt.block_on(async move {
let pipeline_pubsub = context.pubsub.clone();
let db_pubsub = context.pubsub.clone();
let pipeline_handle = tokio::task::spawn(async move {
let mut pipeline = Pipeline::new(pipeline_pubsub);
pipeline.start().await.unwrap();
});
let sqlite_writer_handle = tokio::task::spawn(async move {
let mut db_writer = sqlite::SqliteDb::new().await;
db_writer.start(db_pubsub).await.unwrap();
});
sqlite_writer_handle.await.unwrap();
pipeline_handle.await.unwrap();
});
}

src/noose/pipeline.rs Normal file

@@ -0,0 +1,166 @@
use crate::bussy::{channels, Command, Message, PubSub};
use crate::utils::error::Error;
use nostr::Event;
use std::sync::Arc;
pub struct Pipeline {
pubsub: Arc<PubSub>,
}
impl Pipeline {
pub fn new(pubsub: Arc<PubSub>) -> Self {
Self { pubsub }
}
pub async fn start(&mut self) -> Result<(), Error> {
let mut subscriber = self.pubsub.subscribe(channels::MSG_PIPELINE).await;
while let Ok(message) = subscriber.recv().await {
log::debug!("[Pipeline] received message: {:?}", message);
let command = match message.content {
Command::PipelineReqEvent(client_id, event) => {
match self.handle_event(client_id, event.clone()).await {
Ok(_) => {
let message =
nostr::RelayMessage::new_ok(event.id, true, "".to_string());
Command::PipelineResRelayMessageOk(client_id, message)
}
Err(e) => Command::ServiceError(e),
}
}
_ => Command::Noop,
};
if command != Command::Noop {
let channel = message.source;
let message = Message {
source: channels::MSG_PIPELINE,
content: command,
};
log::info!(
"[Pipeline] channel: {} - publishing new message: {:?}",
channel,
message
);
self.pubsub.publish(channel, message).await;
}
}
Ok(())
}
pub async fn handle_event(
&self,
client_id: uuid::Uuid,
event: Box<Event>,
) -> Result<(), Error> {
let store_event_task = self.store_event(event.clone());
let process_deletions_task = self.process_deletions(event.clone());
let track_hashtags_task = self.track_hashtags(event.clone());
let process_media_task = self.process_media(event.clone());
let stream_out_task = self.stream_out(event.clone());
let broadcast_task = self.broadcast(event.clone());
let (
store_event_result,
process_deletions_result,
track_hashtags_result,
process_media_result,
stream_out_result,
broadcast_result,
) = tokio::join!(
store_event_task,
process_deletions_task,
track_hashtags_task,
process_media_task,
stream_out_task,
broadcast_task
);
match (
store_event_result,
process_deletions_result,
track_hashtags_result,
process_media_result,
stream_out_result,
broadcast_result,
) {
(Ok(_), Ok(_), Ok(_), Ok(_), Ok(_), Ok(_)) => {
log::info!("[Pipeline] Tasks finished successfully");
Ok(())
}
_ => {
log::error!("[Pipeline] One or more futures returned an error.");
Err(Error::internal_with_message(
"[Pipeline] One or more futures returned an error.",
))
}
}
}
async fn store_event(&self, event: Box<Event>) -> Result<(), Error> {
if event.kind.is_ephemeral() {
return Ok(());
}
self.pubsub
.publish(
channels::MSG_NOOSE,
Message {
source: channels::MSG_PIPELINE,
content: Command::DbReqWriteEvent(event),
},
)
.await;
Ok(())
}
async fn process_deletions(&self, event: Box<Event>) -> Result<(), Error> {
// if event.kind.as_u32() == 5 {
// let events_for_deletion: Vec<String> = event
// .tags
// .iter()
// .filter_map(|tag| match tag {
// nostr::Tag::Event(event_id, _, _) => Some(event_id.to_string()),
// _ => None,
// })
// .collect();
// self.pubsub
// .publish(
// channels::MSG_NOOSE,
// Message {
// source: channels::MSG_PIPELINE,
// content: Command::DbReqDeleteEvents(events_for_deletion),
// },
// )
// .await;
// }
Ok(())
}
async fn track_hashtags(&self, event: Box<Event>) -> Result<(), Error> {
Ok(())
}
async fn process_media(&self, event: Box<Event>) -> Result<(), Error> {
Ok(())
}
async fn stream_out(&self, event: Box<Event>) -> Result<(), Error> {
let message = Message {
source: channels::MSG_PIPELINE,
content: Command::PipelineResStreamOutEvent(event),
};
self.pubsub.publish(channels::MSG_RELAY, message).await;
Ok(())
}
async fn broadcast(&self, event: Box<Event>) -> Result<(), Error> {
Ok(())
}
}

src/noose/sled.rs Normal file

@@ -0,0 +1,234 @@
use super::db::Noose;
use crate::bussy::{channels, Command, Message, PubSub};
use crate::utils::error::Error;
use crate::utils::structs::Subscription;
use async_trait::async_trait;
use nostr::Event;
use serde::Serialize;
use std::sync::Arc;
use super::user::{User, UserRow};
// Db Interface
pub struct SledDb {
db: sled::Db,
events: sled::Tree,
nip05s: sled::Tree,
pub users: sled::Tree,
index: sled::Db,
}
impl SledDb {
pub fn new() -> Self {
let db = sled::open("/tmp/sled_db").unwrap();
let events = db.open_tree("events").unwrap();
let nip05s = db.open_tree("identifiers").unwrap();
let accounts = db.open_tree("accounts").unwrap();
let index = sled::open("/tmp/sled_index").unwrap();
Self {
db,
events,
nip05s,
users: accounts,
index,
}
}
fn clear_db(&self) -> Result<(), sled::Error> {
self.db.clear()
}
fn clear_index(&self) -> Result<(), sled::Error> {
self.index.clear()
}
async fn insert_user(&self, user: UserRow) -> Result<(), Error> {
let pubkey = user.pubkey.clone();
let username = user.username.clone();
if let Ok(Some(_)) = self.nip05s.get(&username) {
return Err(Error::internal_with_message("User already exists"));
}
let mut user_buff = flexbuffers::FlexbufferSerializer::new();
user.serialize(&mut user_buff).unwrap();
self.nip05s.insert(&username, user_buff.view()).unwrap();
let prefix = "nip05:";
let key = format!("{}{}", prefix, pubkey);
self.index.insert(key, username.as_bytes()).unwrap();
Ok(())
}
async fn get_user(&self, user: User) -> Result<UserRow, Error> {
let mut user_row = None;
if let Some(username) = user.name {
if let Ok(Some(buff)) = self.nip05s.get(username) {
let b = flexbuffers::from_slice::<UserRow>(&buff).unwrap();
user_row = Some(b);
}
} else if let Some(pubkey) = user.pubkey {
let prefix = "nip05:";
let reference = format!("{}{}", prefix, pubkey);
if let Ok(Some(row)) = self.index.get(reference) {
let key = String::from_utf8(row.to_vec()).unwrap();
if let Ok(Some(buff)) = self.nip05s.get(key) {
let b = flexbuffers::from_slice::<UserRow>(&buff).unwrap();
user_row = Some(b);
}
}
}
match user_row {
Some(user) => Ok(user),
None => Err(Error::internal_with_message("User not found")),
}
}
}
#[async_trait]
impl Noose for SledDb {
async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error> {
let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await;
while let Ok(message) = subscriber.recv().await {
log::info!("noose subscriber received: {:?}", message);
let command = match message.content {
Command::DbReqInsertUser(user) => match self.insert_user(user).await {
Ok(_) => Command::DbResOk,
Err(e) => Command::ServiceError(e),
},
Command::DbReqGetUser(user) => match self.get_user(user).await {
Ok(user) => Command::DbResUser(user),
Err(e) => Command::ServiceError(e),
},
Command::DbReqWriteEvent(event) => match self.write_event(event).await {
Ok(_) => Command::DbResOk,
Err(e) => Command::ServiceError(e),
},
_ => Command::Noop,
};
if command != Command::Noop {
log::info!("Publishing new message");
let channel = message.source;
pubsub
.publish(
channel,
Message {
source: channels::MSG_NOOSE,
content: command,
},
)
.await;
}
}
Ok(())
}
async fn migration_up(&self) {}
async fn write_event(&self, event: Box<Event>) -> Result<String, Error> {
let mut event_buff = flexbuffers::FlexbufferSerializer::new();
event.serialize(&mut event_buff).unwrap();
self.events.insert(event.id, event_buff.view()).unwrap();
{
// Timestamp
let key = format!("created_at:{}|#e:{}", event.created_at, event.id);
self.index.insert(key, event.id.as_bytes()).unwrap();
}
{
// Author, pubkeys #p
let key = format!("#author:{}|#e:{}", event.pubkey, event.id);
self.index.insert(key, event.id.as_bytes()).unwrap();
// self.index.scan_prefix(
}
{
// Kinds
let key = format!("#k:{}|#e:{}", event.kind, event.id);
self.index.insert(key, event.id.as_bytes()).unwrap();
// self.index.scan_prefix(
}
{
// Tags
event.tags.iter().for_each(|tag| {
if let Some(key) = match tag {
// #e tag
nostr::Tag::Event(event_id, _, _) => Some(format!("#e:{}", event_id)),
// #p tag
nostr::Tag::PubKey(pubkey, _) => Some(format!("#p:{}|#e:{}", pubkey, event.id)),
// #t tag
nostr::Tag::Hashtag(hashtag) => Some(format!("#t:{}|#e:{}", hashtag, event.id)),
// #a tag
nostr::Tag::A {
kind,
public_key,
identifier,
relay_url,
} => Some(format!(
"#a:kind:{}|#a:pubkey:{}#a:identifier:{}|#e:{}",
kind, public_key, identifier, event.id
)),
_ => None,
} {
self.index.insert(key, event.id.as_bytes()).unwrap();
}
});
// let key = format!("#t:{}|#e:{}", event.kind, event.id);
// self.index.insert(key, event.id.as_bytes()).unwrap();
// self.index.scan_prefix(
}
let message = format!("[\"OK\", \"{}\", true, \"\"]", event.id.to_string());
Ok(message)
}
async fn find_event(&self, subscription: Subscription) -> Result<Vec<String>, Error> {
todo!()
}
}
#[cfg(test)]
mod tests {
use super::SledDb;
use crate::{
bussy::PubSub,
noose::user::{User, UserRow},
};
use std::sync::Arc;
#[tokio::test]
async fn get_db_names() {
let pubsub = Arc::new(PubSub::new());
let db = SledDb::new();
let pk = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string();
let username = "klink".to_string();
let user = UserRow::new(pk, username, false);
let result = db.insert_user(user).await;
let pubkey = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string();
let username = "klink".to_string();
let user = User {
name: None,
pubkey: Some(pubkey),
};
let user = db.get_user(user).await;
db.clear_db().unwrap();
db.clear_index().unwrap();
}
}

src/noose/sqlite.rs Normal file

@@ -0,0 +1,889 @@
use async_trait::async_trait;
use nostr::{Event, JsonUtil};
use sea_query::{extension::sqlite::SqliteExpr, Query};
use sea_query_binder::SqlxBinder;
use sqlx::sqlite::{Sqlite, SqlitePoolOptions};
use sqlx::FromRow;
use sqlx::{migrate::MigrateDatabase, Pool};
use std::sync::Arc;
use super::db::Noose;
use crate::bussy::{channels, Command, Message, PubSub};
use crate::utils::{error::Error, structs::Subscription};
enum EventsTable {
Table,
EventId,
Kind,
Pubkey,
Content,
CreatedAt,
Tags,
Sig,
}
impl sea_query::Iden for EventsTable {
fn unquoted(&self, s: &mut dyn std::fmt::Write) {
write!(
s,
"{}",
match self {
Self::Table => "events",
Self::EventId => "id",
Self::Kind => "kind",
Self::Pubkey => "pubkey",
Self::Content => "content",
Self::CreatedAt => "created_at",
Self::Tags => "tags",
Self::Sig => "sig",
}
)
.unwrap()
}
}
enum EventsFTSTable {
Table,
EventId,
Content,
}
impl sea_query::Iden for EventsFTSTable {
fn unquoted(&self, s: &mut dyn std::fmt::Write) {
write!(
s,
"{}",
match self {
Self::Table => "events_fts",
Self::EventId => "id",
Self::Content => "content",
}
)
.unwrap()
}
}
enum TagsTable {
Table,
Tag,
Value,
EventId,
}
impl sea_query::Iden for TagsTable {
fn unquoted(&self, s: &mut dyn std::fmt::Write) {
write!(
s,
"{}",
match self {
Self::Table => "tags",
Self::Tag => "tag",
Self::Value => "value",
Self::EventId => "event_id",
}
)
.unwrap()
}
}
#[derive(FromRow, Debug)]
struct EventsCountRow(i32);
#[derive(FromRow, Debug)]
struct EventRow {
id: String,
pubkey: String,
created_at: i64,
kind: i64,
tags: String,
sig: String,
content: String,
}
impl EventRow {
pub fn to_string(&self, subscription_id: nostr::SubscriptionId) -> String {
let tags: Vec<Vec<String>> = serde_json::from_str(&self.tags).unwrap();
let message = serde_json::json!([
"EVENT",
subscription_id,
{
"id": self.id,
"content": self.content,
"created_at": self.created_at,
"kind": self.kind,
"pubkey": self.pubkey,
"sig": self.sig,
"tags": tags
}
]);
message.to_string()
}
}
pub struct SqliteDb {
pool: Pool<Sqlite>,
}
impl SqliteDb {
pub async fn new() -> Self {
let pool = SqliteDb::build_pool("noose_pool", 42).await;
Self { pool }
}
pub fn info(&self) {
dbg!(self.pool.options());
}
async fn migrate(pool: &Pool<Sqlite>) {
sqlx::migrate!("src/noose/migrations")
.run(pool)
.await
.unwrap()
}
async fn build_pool(name: &str, max_size: u32) -> Pool<Sqlite> {
let pool_options = SqlitePoolOptions::new()
.test_before_acquire(true)
// .idle_timeout(Some(Duration::from_secs(10)))
// .max_lifetime(Some(Duration::from_secs(30)))
.max_lifetime(None)
.idle_timeout(None)
.max_connections(max_size);
let db_url = "sqlite://sqlite.db";
if !Sqlite::database_exists(db_url).await.unwrap_or(false) {
log::info!("Creating database {}", db_url);
match Sqlite::create_database(db_url).await {
Ok(_) => log::info!("Db {} created", db_url),
Err(_) => panic!("Failed to create database {}", db_url),
}
}
if let Ok(pool) = pool_options.connect(db_url).await {
log::info!("Connected to sqlite pool {}", name);
pool
} else {
panic!("Connection to sqlite pool {} failed", name);
}
}
async fn add_event(&self, event: Box<Event>) -> Result<String, Error> {
let id = event.id.to_string();
let kind = event.kind.to_string();
let pubkey = event.pubkey.to_string();
let content = event.content.to_string();
let created_at = event.created_at.as_i64();
let tags = serde_json::to_string(&event.tags).unwrap();
let sig = event.sig.to_string();
let message = format!("[\"OK\", \"{}\", true, \"\"]", id.clone());
if event.is_ephemeral() {
return Ok(message);
}
let tx = self.pool.begin().await.unwrap();
{
if event.is_replaceable() {
dbg!("new event is replaceable - searching for previously stored event");
let (sql, values) = Query::select()
.from(EventsTable::Table)
.columns([EventsTable::EventId])
.and_where(
sea_query::Expr::col(EventsTable::Pubkey).eq(event.pubkey.to_string()),
)
.and_where(sea_query::Expr::col(EventsTable::Kind).eq(event.kind.as_u32()))
.and_where(
sea_query::Expr::col(EventsTable::CreatedAt).gte(event.created_at.as_i64()),
)
.limit(1)
.build_sqlx(sea_query::SqliteQueryBuilder);
let repl_count = sqlx::query_with(&sql, values).fetch_one(&self.pool).await;
if repl_count.ok().is_some() {
return Ok(message);
}
}
}
{
if event.is_parameterized_replaceable() {
dbg!(
"new event is parametrized replaceable - searching for previously stored event"
);
let d_tags: Vec<String> = event
.tags
.iter()
.filter(|tag| tag.kind() == nostr::TagKind::D)
.map(|tag| tag.clone().to_vec()[1].clone())
.collect();
let (sql, values) = Query::select()
.from(EventsTable::Table)
.column((EventsTable::Table, EventsTable::EventId))
.left_join(
TagsTable::Table,
sea_query::Expr::col((TagsTable::Table, TagsTable::EventId))
.equals((EventsTable::Table, EventsTable::EventId)),
)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey))
.eq(event.pubkey.to_string()),
)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Kind))
.eq(event.kind.as_u32()),
)
.and_where(sea_query::Expr::col((TagsTable::Table, TagsTable::Tag)).eq("d"))
.and_where(
sea_query::Expr::col((TagsTable::Table, TagsTable::Value))
.eq(d_tags[0].to_string()),
)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::CreatedAt))
.gte(event.created_at.as_i64()),
)
.limit(1)
.build_sqlx(sea_query::SqliteQueryBuilder);
let repl_count = sqlx::query_with(&sql, values).fetch_one(&self.pool).await;
if repl_count.ok().is_some() {
return Ok(message);
}
}
}
// Replaceable event: delete any older versions before insert
{
if event.is_replaceable() {
dbg!("deleting older replaceable event from events table");
let (sql, values) = Query::delete()
.from_table(EventsTable::Table)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Kind))
.eq(event.kind.as_u32()),
)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey))
.eq(event.pubkey.to_string()),
)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::EventId))
.not_in_subquery(
Query::select()
.from(EventsTable::Table)
.column(EventsTable::EventId)
.and_where(
sea_query::Expr::col(EventsTable::Kind)
.eq(event.kind.as_u32()),
)
.and_where(
sea_query::Expr::col(EventsTable::Pubkey)
.eq(event.pubkey.to_string()),
)
.order_by(EventsTable::CreatedAt, sea_query::Order::Desc)
.limit(1)
.to_owned(),
),
)
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
if results.rows_affected() > 0 {
log::info!(
"removed {} older replaceable kind {} events for author: {:?}",
results.rows_affected(),
event.kind.as_u32(),
event.pubkey.to_string()
);
}
}
}
// Parameterized replaceable event: delete any older versions before insert
{
if event.is_parameterized_replaceable() {
dbg!("deleting older parametrized replaceable event from events table");
let d_tag = event.identifier();
let (sql, values) = Query::delete()
.from_table(EventsTable::Table)
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::EventId))
.in_subquery(
Query::select()
.from(EventsTable::Table)
.column((EventsTable::Table, EventsTable::EventId))
.left_join(
TagsTable::Table,
sea_query::Expr::col((
TagsTable::Table,
TagsTable::EventId,
))
.equals((EventsTable::Table, EventsTable::EventId)),
)
.and_where(
sea_query::Expr::col((
EventsTable::Table,
EventsTable::Kind,
))
.eq(event.kind.as_u32()),
)
.and_where(
sea_query::Expr::col((
EventsTable::Table,
EventsTable::Pubkey,
))
.eq(event.pubkey.to_string()),
)
.and_where(
sea_query::Expr::col((TagsTable::Table, TagsTable::Tag))
.eq("d"),
)
.and_where(
sea_query::Expr::col((TagsTable::Table, TagsTable::Value))
.eq(d_tag),
)
.to_owned(),
),
)
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
if results.rows_affected() > 0 {
log::info!("removed {} older parameterized replaceable kind {} events for author: {:?}", results.rows_affected(), event.kind, event.pubkey);
}
}
}
// Process deletion events
dbg!(event.as_json());
if event.kind.as_u32() == 5 {
dbg!("deleting event");
let ids: Vec<String> = event.event_ids().map(|eid| eid.to_string()).collect();
let (sql, values) = Query::delete()
.from_table(EventsTable::Table)
.and_where(sea_query::Expr::col(EventsTable::Kind).ne(5))
.and_where(sea_query::Expr::col(EventsTable::Pubkey).eq(event.pubkey.to_string()))
.and_where(sea_query::Expr::col(EventsTable::EventId).is_in(&ids))
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
if results.rows_affected() > 0 {
log::info!(
"removed {} events for author {:?}",
results.rows_affected(),
event.pubkey
);
}
// Delete from EventsFTS
let (sql, values) = Query::delete()
.from_table(EventsFTSTable::Table)
.and_where(sea_query::Expr::col(EventsFTSTable::EventId).is_in(&ids))
.build_sqlx(sea_query::SqliteQueryBuilder);
let _ = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
} else {
dbg!("inserting new event in events");
// Insert into Events table
let (sql, values) = Query::insert()
.into_table(EventsTable::Table)
.columns([
EventsTable::EventId,
EventsTable::Content,
EventsTable::Kind,
EventsTable::Pubkey,
EventsTable::CreatedAt,
EventsTable::Tags,
EventsTable::Sig,
])
.values_panic([
id.clone().into(),
content.clone().into(),
kind.into(),
pubkey.into(),
created_at.into(),
tags.into(),
sig.into(),
])
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
// Insert into EventsFTS table
dbg!("inserting new event into eventsFTS");
let (sql, values) = Query::insert()
.into_table(EventsFTSTable::Table)
.columns([EventsFTSTable::EventId, EventsFTSTable::Content])
.values_panic([id.clone().into(), content.into()])
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
// Insert into Tags table
dbg!("inserting new event into tags");
for tag in event.tags.clone() {
let tag = tag.to_vec();
if tag.len() >= 2 {
let tag_name = &tag[0];
let tag_value = &tag[1];
if tag_name.len() == 1 {
let (sql, values) = Query::insert()
.into_table(TagsTable::Table)
.columns([TagsTable::Tag, TagsTable::Value, TagsTable::EventId])
.values_panic([tag_name.into(), tag_value.into(), id.clone().into()])
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values)
.execute(&self.pool)
.await
.unwrap();
}
}
}
}
tx.commit().await.unwrap();
Ok(message)
}
async fn index_search(&self, event: Box<Event>) -> Result<(), Error> {
let id = event.id.to_string();
let content = event.content.to_string();
let (sql, values) = Query::insert()
.into_table(EventsFTSTable::Table)
.columns([EventsFTSTable::EventId, EventsFTSTable::Content])
.values_panic([id.into(), content.into()])
.build_sqlx(sea_query::SqliteQueryBuilder);
let results = sqlx::query_with(&sql, values).execute(&self.pool).await;
if results.is_ok() {
Ok(())
} else {
Err(Error::internal_with_message(
"Unable to write event to events_fts index",
))
}
}
async fn index_tags(&self, event: Box<Event>) -> Result<(), Error> {
// let t: Vec<String> = Vec::new();
// for tag in event.tags {
// tag.kind()
// }
Ok(())
}
fn get_filter_query(&self, filter: &nostr::Filter) -> sea_query::SelectStatement {
let mut query = Query::select()
.column((EventsTable::Table, EventsTable::EventId))
.column((EventsTable::Table, EventsTable::Content))
.columns([
EventsTable::Kind,
EventsTable::Pubkey,
EventsTable::CreatedAt,
EventsTable::Tags,
EventsTable::Sig,
])
.from(EventsTable::Table)
.order_by(EventsTable::CreatedAt, sea_query::Order::Desc)
.to_owned();
if !filter.ids.is_empty() {
let ids = filter.ids.iter().map(|id| id.to_string());
query = query
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::EventId)).is_in(ids),
)
.to_owned();
}
if !filter.kinds.is_empty() {
let kinds: Vec<u32> = filter.kinds.iter().map(|kind| kind.as_u32()).collect();
query = query
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Kind)).is_in(kinds),
)
.to_owned();
}
if !filter.authors.is_empty() {
let authors = filter.authors.iter().map(|author| author.to_string());
query = query
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey)).is_in(authors),
)
.to_owned();
}
if let Some(since) = filter.since {
query = query
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::CreatedAt))
.gte(since.as_u64()),
)
.to_owned();
}
if let Some(until) = filter.until {
query = query
.and_where(
sea_query::Expr::col((EventsTable::Table, EventsTable::CreatedAt))
.lte(until.as_u64()),
)
.to_owned();
}
if let Some(limit) = filter.limit {
query = query.limit(limit as u64).to_owned();
}
filter.generic_tags.iter().for_each(|(tag, values)| {
let values = values.iter().map(|val| val.to_string());
query = query
.left_join(
TagsTable::Table,
sea_query::Expr::col((TagsTable::Table, TagsTable::EventId))
.equals((EventsTable::Table, EventsTable::EventId)),
)
.and_where(
sea_query::Expr::col((TagsTable::Table, TagsTable::Tag)).eq(tag.to_string()),
)
.and_where(sea_query::Expr::col((TagsTable::Table, TagsTable::Value)).is_in(values))
.to_owned();
});
if let Some(search) = &filter.search {
query = query
.inner_join(
EventsFTSTable::Table,
sea_query::Expr::col((EventsTable::Table, EventsTable::EventId))
.equals((EventsFTSTable::Table, EventsFTSTable::EventId)),
)
.and_where(
sea_query::Expr::col((EventsFTSTable::Table, EventsFTSTable::Content))
.matches(search),
)
.to_owned();
}
query
}
fn get_filters_query(&self, subscription: Subscription) -> Option<sea_query::SelectStatement> {
subscription
.filters
.iter()
.map(|filter| {
Query::select()
.column((EventsTable::Table, EventsTable::EventId))
.column((EventsTable::Table, EventsTable::Content))
.columns([
EventsTable::Kind,
EventsTable::Pubkey,
EventsTable::CreatedAt,
EventsTable::Tags,
EventsTable::Sig,
])
.from_subquery(
self.get_filter_query(filter),
sea_query::Alias::new("events"),
)
.to_owned()
})
.reduce(|mut result, query| result.union(sea_query::UnionType::All, query).to_owned())
}
async fn delete_filters(&self, subscription: Subscription) -> Vec<EventRow> {
todo!()
}
async fn count_events_by_filters(&self, subscription: Subscription) -> i32 {
if subscription.filters.is_empty() {
return 0;
}
let (sql, values) = self
.get_filters_query(subscription)
.unwrap()
.clear_selects()
.expr_as(
sea_query::Func::count(sea_query::Expr::col((
EventsTable::Table,
EventsTable::EventId,
))),
sea_query::Alias::new("count"),
)
.build_sqlx(sea_query::SqliteQueryBuilder);
println!("count_filters SEA_QUERY built SQL: {}", sql.clone());
let counts = sqlx::query_as_with::<_, EventsCountRow, _>(&sql, values)
.fetch_one(&self.pool)
.await
.unwrap();
dbg!(counts);
1
}
}
#[async_trait]
impl Noose for SqliteDb {
async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error> {
let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await;
while let Ok(message) = subscriber.recv().await {
log::info!("[Noose] received message: {:?}", message);
let command = match message.content {
Command::DbReqWriteEvent(event) => match self.write_event(event).await {
Ok(status) => Command::DbResOkWithStatus(status),
Err(e) => Command::ServiceError(e),
},
Command::DbReqFindEvent(client_id, subscription) => {
match self.find_event(subscription).await {
Ok(events) => Command::DbResRelayMessage(client_id, events),
Err(e) => Command::ServiceError(e),
}
}
_ => Command::Noop,
};
if command != Command::Noop {
let channel = message.source;
let message = Message {
source: channels::MSG_NOOSE,
content: command,
};
log::debug!("[Noose] publishing new message: {:?}", message);
pubsub.publish(channel, message).await;
}
}
Ok(())
}
async fn migration_up(&self) {
SqliteDb::migrate(&self.pool).await;
}
async fn write_event(&self, event: Box<Event>) -> Result<String, Error> {
log::debug!("[Noose] write_event triggered");
let status = self.add_event(event).await.unwrap();
return Ok(status);
}
async fn find_event(&self, subscription: Subscription) -> Result<Vec<String>, Error> {
log::debug!("making query from filters...");
let eose_message =
vec![nostr::RelayMessage::EndOfStoredEvents(subscription.id.clone()).as_json()];
if let Some(sql_statement) = self.get_filters_query(subscription.clone()) {
let (sql, values) = sql_statement.build_sqlx(sea_query::SqliteQueryBuilder);
log::info!("SEA_QUERY built SQL: {}", sql.clone());
match sqlx::query_as_with::<_, EventRow, _>(&sql, values)
.fetch_all(&self.pool)
.await
{
Ok(rows) => {
if rows.is_empty() {
return Ok(eose_message);
} else {
let relay_messages: Vec<String> = rows
.iter()
.map(|row| row.to_string(subscription.id.clone()))
.collect();
return Ok(relay_messages);
}
}
Err(e) => {
log::error!("{}", e);
return Err(Error::internal(e.into()));
}
}
}
return Ok(eose_message);
}
}
#[cfg(test)]
mod tests {
use super::Noose;
use super::SqliteDb;
use crate::utils::structs::Subscription;
use nostr::util::JsonUtil;
#[tokio::test]
async fn find_event() {
let db = SqliteDb::new().await;
let t = std::time::Instant::now();
let client_id = "test_id".to_string();
let cm = nostr::ClientMessage::from_json(
r#"["REQ","7b9bc4b6-701c-40b6-898f-4e7c6b5b1510",{"authors":["04c915daefee38317fa734444acee390a8269fe5810b2241e5e6dd343dfbecc9"],"kinds":[0]}]"#,
).unwrap();
let (sub_id, filters) = match cm {
nostr::ClientMessage::Req {
subscription_id,
filters,
} => (subscription_id, filters),
_ => panic!("sneed :("),
};
let sub = Subscription::new(sub_id, filters);
db.find_event(sub).await.unwrap();
println!(
"Time passed: {}",
(std::time::Instant::now() - t).as_millis()
);
}
#[tokio::test]
async fn delete_events() {
let db = SqliteDb::new().await;
let t = std::time::Instant::now();
let client_id = "test_id".to_string();
let my_keys = nostr::Keys::generate();
let eid = nostr::EventId::all_zeros();
let tag_event = nostr::Tag::Event {
event_id: eid,
relay_url: None,
marker: None,
};
let tag_url = nostr::Tag::AbsoluteURL(nostr::types::UncheckedUrl::new(
"http://foo.net".to_string(),
));
let tag_hashtag = nostr::Tag::Hashtag("farm".to_string());
let event = nostr::EventBuilder::new_text_note(
"sneed feed and seed",
vec![tag_event, tag_url, tag_hashtag],
)
.to_event(&my_keys)
.unwrap();
dbg!(&event.as_json());
let resp = db.add_event(Box::new(event.clone())).await.unwrap();
dbg!(resp);
let delete_event = nostr::EventBuilder::delete(vec![event.id])
.to_event(&my_keys)
.unwrap();
dbg!(&delete_event);
let resp = db.add_event(Box::new(delete_event.clone())).await.unwrap();
dbg!(resp);
// let sub_id = nostr::SubscriptionId::new("test".to_string());
// let mut subscription = Subscription::new(sub_id, vec![]);
// if delete_event.kind == nostr::Kind::EventDeletion {
// delete_event
// .tags
// .iter()
// .filter(|tag| {
// matches!(
// tag,
// nostr::Tag::Event {
// event_id,
// relay_url,
// marker,
// }
// )
// })
// .for_each(|tag| {
// if let nostr::Tag::Event {
// event_id,
// relay_url,
// marker,
// } = tag
// {
// let filter = nostr::Filter::new();
// let filter = &filter.event(*event_id);
// subscription.filters.push(filter.clone());
// }
// });
// dbg!(&subscription);
// }
// let res = db.delete_filters(subscription).await;
// dbg!(res);
// let sub = Subscription::new(sub_id, filters);
// let num = db.delete_filters(sub).await.len();
// println!(
// "Time passed: {}",
// (std::time::Instant::now() - t).as_millis()
// );
// assert_eq!(num, 1);
}
#[tokio::test]
async fn count_events() {
let db = SqliteDb::new().await;
let t = std::time::Instant::now();
let client_id = "test_id".to_string();
let cm = nostr::ClientMessage::from_json(
r#"["COUNT","7b9bc4b6-701c-40b6-898f-4e7c6b5b1510",{"authors":["6be3c1446231fe6d117d72e29b60094bbb3eec029100c34f627dc4ebe8369a64"],"kinds":[1]}]"#,
).unwrap();
let (sub_id, filters) = match cm {
nostr::ClientMessage::Count {
subscription_id,
filters,
} => (subscription_id, filters),
_ => panic!("sneed :("),
};
let sub = Subscription::new(sub_id, filters);
let num = db.count_events_by_filters(sub).await;
println!(
"Time passed: {}",
(std::time::Instant::now() - t).as_millis()
);
assert_eq!(num, 1);
}
}
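get_filter_query builds one SELECT per nostr filter and get_filters_query UNIONs them together; for reference, a standalone sketch (hypothetical values, plain Alias idents instead of the enum idens above) of the SQL shape produced for a single author+kind filter:

use sea_query::{Alias, Expr, Order, Query};
use sea_query_binder::SqlxBinder;

fn main() {
    let (sql, _values) = Query::select()
        .column(Alias::new("id"))
        .column(Alias::new("content"))
        .from(Alias::new("events"))
        .and_where(Expr::col(Alias::new("pubkey")).is_in(["<hex pubkey>"]))
        .and_where(Expr::col(Alias::new("kind")).is_in([0]))
        .order_by(Alias::new("created_at"), Order::Desc)
        .limit(10)
        .build_sqlx(sea_query::SqliteQueryBuilder);
    // Roughly: SELECT "id", "content" FROM "events"
    //   WHERE "pubkey" IN (?) AND "kind" IN (?)
    //   ORDER BY "created_at" DESC LIMIT 10
    println!("{sql}");
}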

src/noose/user.rs Normal file

@@ -0,0 +1,43 @@
use chrono::Utc;
use regex::Regex;
use serde::{Deserialize, Serialize};
use validator::{Validate, ValidationError};
lazy_static! {
static ref VALID_CHARACTERS: Regex = Regex::new(r"^[a-zA-Z0-9\_]+$").unwrap();
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Validate)]
pub struct UserRow {
pub pubkey: String,
pub username: String,
inserted_at: i64,
admin: bool,
}
impl UserRow {
pub fn new(pubkey: String, username: String, admin: bool) -> Self {
Self {
pubkey,
username,
inserted_at: Utc::now().timestamp(),
admin,
}
}
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Validate)]
pub struct User {
#[validate(custom = "validate_pubkey")]
pub pubkey: Option<String>,
#[validate(length(min = 1), regex = "VALID_CHARACTERS")]
pub name: Option<String>,
}
pub fn validate_pubkey(value: &str) -> Result<(), ValidationError> {
use nostr::prelude::FromPkStr;
match nostr::Keys::from_pk_str(value) {
Ok(_) => Ok(()),
Err(_) => Err(ValidationError::new("Unable to parse pubkey")),
}
}

src/relay/handler.rs Normal file

@@ -0,0 +1,12 @@
use crate::relay::ws;
use crate::utils::structs::Context;
use std::net::SocketAddr;
use warp::{Rejection, Reply};
pub async fn ws_handler(
ws: warp::ws::Ws,
context: Context,
client_addr: Option<SocketAddr>,
) -> Result<impl Reply, Rejection> {
Ok(ws.on_upgrade(move |socket| ws::client_connection(socket, context, client_addr)))
}

src/relay/mod.rs Normal file

@@ -0,0 +1,24 @@
mod handler;
mod routes;
mod ws;
use crate::utils::rejection_handler::handle_rejection;
use crate::utils::structs::Context;
use tokio::runtime;
use warp::Filter;
pub fn start(context: Context) {
let rt = runtime::Runtime::new().unwrap();
rt.block_on(async {
log::info!("Starting Relay on wss://127.0.0.1:8080");
let routes = routes::routes(context).recover(handle_rejection);
warp::serve(routes)
// .tls()
// .cert(CERT)
// .key(KEY)
.run(([127, 0, 0, 1], 8080))
.await;
});
}

src/relay/routes.rs Normal file

@@ -0,0 +1,31 @@
use super::handler;
use crate::utils::filter::with_context;
use crate::utils::structs::Context;
use warp::{Filter, Rejection, Reply};
pub fn routes(context: Context) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
let cors = warp::cors().allow_any_origin();
static_files().or(index(context)).with(cors)
}
fn index(context: Context) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
let client_addr = warp::addr::remote();
warp::path::end()
.and(warp::ws())
.and(with_context(context))
.and(client_addr)
.and_then(handler::ws_handler)
}
fn static_files() -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
let mut foo = std::env::current_exe().unwrap();
foo.pop();
let mut www = foo.clone();
www.pop();
www.push(std::path::Path::new("www/static"));
warp::get().and(warp::fs::dir(www))
}

src/relay/ws.rs Normal file

@@ -0,0 +1,264 @@
use crate::{
bussy::channels,
utils::structs::{Client, Context, Subscription},
};
use futures_util::StreamExt;
use nostr::{ClientMessage, Event, Filter, JsonUtil, SubscriptionId};
use serde_json::from_str;
use std::net::SocketAddr;
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use warp::ws::{Message, WebSocket};
use futures_util::SinkExt;
pub async fn client_connection(ws: WebSocket, context: Context, client_addr: Option<SocketAddr>) {
let (mut ws_sender, mut ws_receiver) = ws.split();
let (client_sender, client_receiver) = mpsc::unbounded_channel();
let mut client_receiver = UnboundedReceiverStream::new(client_receiver);
// Create a new Client for this connection and attach its sender
let ip = client_addr.unwrap().ip().to_string();
let mut client = Client::new(ip);
client.client_connection = Some(client_sender);
let mut subscriber = context.pubsub.subscribe(channels::MSG_RELAY).await;
loop {
tokio::select! {
Ok(message) = subscriber.recv() => {
match message.content {
crate::bussy::Command::PipelineResRelayMessageOk(client_id, relay_message) => {
if client.client_id == client_id {
if let Some(sender) = &client.client_connection {
if !sender.is_closed() {
log::info!("[Relay] sending back the status of the processed event: {}", relay_message.as_json());
sender.send(Ok(Message::text(relay_message.as_json()))).unwrap();
}
}
}
},
crate::bussy::Command::PipelineResStreamOutEvent(event) => {
// if client.client_id == client_id {
if let Some(sender) = &client.client_connection {
if let Some(relay_message) = get_relay_message(&client, event) {
log::info!("[Relay] sending processed event to subscribed client: {}", relay_message.as_json());
if !sender.is_closed() {sender.send(Ok(Message::text(relay_message.as_json()))).unwrap()};
}
}
// }
}
crate::bussy::Command::DbResRelayMessage(client_id, events) => {
if client.client_id == client_id {
if let Some(sender) = &client.client_connection {
if !sender.is_closed() {
for event in events {
sender.send(Ok(Message::text(event))).unwrap();
}
}
}
}
}
crate::bussy::Command::DbResOkWithStatus(status) => {
if let Some(sender) = &client.client_connection {
sender.send(Ok(Message::text(status))).unwrap();
}
},
_ => ()
}
},
Some(message) = client_receiver.next() => {
match message {
Ok(message) => {
// ws_sender
// .send(message)
// .unwrap_or_else(|e| {
// log::error!("websocket send error: {}", e);
// })
// .await;
match ws_sender.send(message).await {
Ok(_) => (),
Err(e) => {
log::error!("websocket send error: {}", e);
break;
}
}
}
Err(e) => {
log::error!("websocket send error: {}", e);
break;
}
}
},
Some(result) = ws_receiver.next() => {
let msg = match result {
Ok(msg) => msg,
Err(e) => {
log::error!(
"error receiving ws message for id: {}: {}",
client.client_id.clone(),
e
);
break;
}
};
socket_on_message(&context, &mut client, msg).await;
}
}
}
// Handle proper disconnects
socket_on_close(&client).await;
}
/// Checks whether any of the client's subscriptions are interested in the event
fn get_relay_message(client: &Client, event: Box<Event>) -> Option<nostr::RelayMessage> {
let mut id = &"".to_string();
log::info!(
"Checking if client with id {} needs the event",
client.client_id
);
if client.subscriptions.iter().any(|(sub_id, sub)| {
if sub.interested_in_event(&event) {
id = sub_id;
return true;
}
false
}) {
return Some(nostr::RelayMessage::Event {
subscription_id: nostr::SubscriptionId::new(id),
event,
});
}
None
}
async fn socket_on_close(client: &Client) {
// clients.write().await.remove(id);
log::info!("{} disconnected", client.client_id);
}
async fn socket_on_message(context: &Context, client: &mut Client, msg: Message) {
let message = match msg.to_str() {
Ok(raw_message) => raw_message,
Err(_) => return,
};
if message == "ping" || message == "ping\n" {
return;
}
let client_message: ClientMessage = match from_str(message) {
Ok(parsed_message) => parsed_message,
Err(e) => {
log::error!("error while parsing client message request: {}", e);
let response = nostr::RelayMessage::new_notice("Invalid message");
let message = Message::text(response.as_json());
send(client, message);
return;
}
};
log::info!(
"[client {} - {}] message: {}",
client.ip(),
client.client_id,
client_message.as_json()
);
handle_msg(context, client, client_message).await;
}
fn send(client: &Client, message: Message) {
if let Some(sender) = &client.client_connection {
if !sender.is_closed() {
sender.send(Ok(message)).unwrap();
}
}
}
async fn handle_msg(context: &Context, client: &mut Client, client_message: ClientMessage) {
match client_message {
ClientMessage::Event(event) => handle_event(context, client, event).await,
ClientMessage::Req {
subscription_id,
filters,
} => handle_req(context, client, subscription_id, filters).await,
ClientMessage::Count {
subscription_id,
filters,
} => handle_count(client, subscription_id, filters).await,
ClientMessage::Close(subscription_id) => handle_close(client, subscription_id).await,
ClientMessage::Auth(event) => handle_auth(client, event).await,
_ => (),
}
}
async fn handle_event(context: &Context, client: &Client, event: Box<Event>) {
log::debug!("handle_event is processing new event");
context
.pubsub
.publish(
channels::MSG_PIPELINE,
crate::bussy::Message {
source: channels::MSG_RELAY,
content: crate::bussy::Command::PipelineReqEvent(client.client_id, event),
},
)
.await;
}
async fn handle_req(
context: &Context,
client: &mut Client,
subscription_id: SubscriptionId,
filters: Vec<Filter>,
) {
let subscription = Subscription::new(subscription_id.clone(), filters);
let needs_historical_events = subscription.needs_historical_events();
client.subscribe(subscription.clone()).unwrap();
if needs_historical_events {
context
.pubsub
.publish(
channels::MSG_NOOSE,
crate::bussy::Message {
source: channels::MSG_RELAY,
content: crate::bussy::Command::DbReqFindEvent(client.client_id, subscription),
},
)
.await
}
}
async fn handle_count(client: &Client, subscription_id: SubscriptionId, filters: Vec<Filter>) {
// context.pubsub.send(new nostr event) then handle possible errors
let subscription = Subscription::new(subscription_id, filters);
let message = Message::text("COUNT not implemented");
send(client, message);
}
async fn handle_close(client: &mut Client, subscription_id: SubscriptionId) {
// context.pubsub.send(new nostr event) then handle possible errors
client.unsubscribe(subscription_id);
// let message = Message::text("CLOSE not implemented");
// send(client, message);
}
async fn handle_auth(client: &Client, event: Box<Event>) {
let message = Message::text("AUTH not implemented");
send(client, message);
}

src/usernames/accounts.rs Normal file

@@ -0,0 +1,62 @@
use crate::noose::user::{User, UserRow};
use crate::bussy::{channels, Command, Message};
use crate::usernames::util::InvalidParameter;
use crate::utils::error::Error;
use warp::{Rejection, Reply};
use super::Context;
pub async fn create_account(user: User, context: Context) -> Result<impl Reply, Rejection> {
let mut subscriber = context.pubsub.subscribe(channels::MSG_NIP05).await;
let pubkey = match user.pubkey {
Some(pk) => {
use nostr::prelude::FromPkStr;
let keys = nostr::Keys::from_pk_str(&pk).unwrap();
keys.public_key().to_string()
}
None => {
return Err(warp::reject::custom(Error::bad_request(
"Pubkey is required",
)))
}
};
let name = match user.name {
Some(n) => n,
None => return Err(warp::reject::custom(Error::bad_request("Name is required"))),
};
let user_row = UserRow::new(pubkey, name, false);
let command = Command::DbReqInsertUser(user_row);
context
.pubsub
.publish(
channels::MSG_NOOSE,
Message {
source: channels::MSG_NIP05,
content: command,
},
)
.await;
if let Ok(result) = subscriber.recv().await {
match result.content {
Command::DbResOk => Ok(warp::reply::with_status(
"ACCOUNT CREATED",
warp::http::StatusCode::CREATED,
)),
Command::ServiceError(e) => Err(warp::reject::custom(e)),
// _ => Err(warp::reject::custom(InvalidParameter)),
_ => Ok(warp::reply::with_status(
"something else",
warp::http::StatusCode::INTERNAL_SERVER_ERROR,
)),
}
} else {
Err(warp::reject::custom(InvalidParameter))
}
}

src/usernames/dto/mod.rs Normal file

@@ -0,0 +1,65 @@
use std::collections::HashMap;
use crate::usernames::validators::validate_pubkey;
use crate::utils::error::Error;
use nostr::prelude::*;
use nostr::{key::XOnlyPublicKey, Keys};
use regex::Regex;
use serde::{Deserialize, Serialize};
use validator::Validate;
lazy_static! {
static ref VALID_CHARACTERS: Regex = Regex::new(r"^[a-zA-Z0-9\_]+$").unwrap();
}
#[derive(Serialize, Deserialize, Debug, Validate)]
pub struct UserBody {
#[validate(length(min = 1), regex = "VALID_CHARACTERS")]
pub name: String,
#[validate(custom(function = "validate_pubkey"))]
pub pubkey: String,
}
impl UserBody {
pub fn get_pubkey(&self) -> XOnlyPublicKey {
let keys = Keys::from_pk_str(&self.pubkey).unwrap();
keys.public_key()
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Nip05 {
names: HashMap<String, String>,
}
#[derive(Serialize, Deserialize, Debug, Validate, Clone)]
pub struct UserQuery {
#[validate(length(min = 1))]
pub user: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, Validate)]
pub struct Account {
#[validate(custom(function = "validate_pubkey"))]
pub pubkey: String,
#[validate(length(min = 1), regex = "VALID_CHARACTERS")]
pub name: String,
#[validate(length(min = 1))]
pub password: String,
}
impl Account {
pub fn get_pubkey(&self) -> Result<XOnlyPublicKey, Error> {
match nostr::Keys::from_pk_str(&self.pubkey) {
Ok(pk) => Ok(pk.public_key()),
Err(e) => Err(Error::invalid_param("pubkey", self.pubkey.clone())),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Validate)]
pub struct AccountPubkey {
#[validate(custom(function = "validate_pubkey"))]
pub pubkey: String,
}

src/usernames/filter.rs Normal file

@@ -0,0 +1,27 @@
use crate::utils::error::Error;
use validator::Validate;
use warp::{Filter, Rejection};
pub fn with_client_ip() {}
pub fn with_user_body() {}
pub fn validate_body_filter<T: serde::de::DeserializeOwned + Send + Validate + 'static>(
) -> impl Filter<Extract = (T,), Error = Rejection> + Copy {
warp::body::json::<T>().and_then(|query: T| async move {
match query.validate() {
Ok(_) => Ok(query),
Err(e) => Err(warp::reject::custom(Error::validation_error(e))),
}
})
}
pub fn validate_query_filter<T: serde::de::DeserializeOwned + Send + Validate + 'static>(
) -> impl Filter<Extract = (T,), Error = Rejection> + Copy {
warp::query::query::<T>().and_then(|query: T| async move {
match query.validate() {
Ok(_) => Ok(query),
Err(e) => Err(warp::reject::custom(Error::validation_error(e))),
}
})
}

src/usernames/handler.rs Normal file

@@ -0,0 +1,89 @@
use crate::bussy::{channels, Command, Message};
use crate::noose::user::User;
use crate::usernames::dto::{Account, UserQuery};
use crate::utils::error::Error;
use crate::utils::structs::Context;
use serde_json::json;
use warp::{Rejection, Reply};
pub async fn get_account(
// account: Result<AccountPubkey, Error>,
account: Account,
context: Context,
) -> Result<impl Reply, Rejection> {
let mut subscriber = context.pubsub.subscribe(channels::MSG_NIP05).await;
let command = Command::DbReqGetAccount(account.pubkey);
context
.pubsub
.publish(
channels::MSG_NOOSE,
Message {
source: channels::MSG_NIP05,
content: command,
},
)
.await;
if let Ok(result) = subscriber.recv().await {
match result.content {
Command::DbResAccount => Ok(warp::reply::with_status(
"ACCOUNT CREATED",
warp::http::StatusCode::CREATED,
)),
Command::ServiceError(e) => Err(warp::reject::custom(e)),
_ => Err(warp::reject::custom(Error::internal_with_message(
"Unhandled message type",
))),
}
} else {
Err(warp::reject::custom(Error::internal_with_message(
"Unhandeled message type",
)))
}
}
pub async fn get_user(user_query: UserQuery, context: Context) -> Result<impl Reply, Rejection> {
let name = user_query.user;
let mut subscriber = context.pubsub.subscribe(channels::MSG_NIP05).await;
let user = User {
name: Some(name),
pubkey: None,
};
let command = Command::DbReqGetUser(user);
context
.pubsub
.publish(
channels::MSG_NOOSE,
Message {
source: channels::MSG_NIP05,
content: command,
},
)
.await;
if let Ok(message) = subscriber.recv().await {
let mut response = json!({"names": {}, "relays": {}});
match message.content {
Command::DbResUser(user) => {
response = json!({
"names": {
user.username: user.pubkey
},
"relays": {}
});
Ok(warp::reply::json(&response))
}
Command::ServiceError(e) => Ok(warp::reply::json(&response)),
_ => Err(warp::reject::custom(Error::internal_with_message(
"Unhandeled message type",
))),
}
} else {
Err(warp::reject::custom(Error::internal_with_message(
"Unhandeled message type",
)))
}
}

src/usernames/mod.rs Normal file

@@ -0,0 +1,22 @@
mod accounts;
pub mod dto;
mod filter;
mod handler;
mod routes;
mod util;
mod validators;
use super::utils::structs::Context;
use crate::utils::rejection_handler::handle_rejection;
use tokio::runtime;
use warp::Filter;
pub fn start(context: Context) {
let rt = runtime::Runtime::new().unwrap();
rt.block_on(async {
log::info!("Starting NIP-05 on http://127.0.0.1:8085");
let routes = routes::routes(context).recover(handle_rejection);
warp::serve(routes).run(([127, 0, 0, 1], 8085)).await;
})
}

src/usernames/routes.rs Normal file

@@ -0,0 +1,57 @@
use crate::noose::user::User;
use super::accounts::create_account;
use super::dto::{Account, UserQuery};
use super::filter::{validate_body_filter, validate_query_filter};
use super::handler::{get_account, get_user};
use crate::utils::filter::with_context;
use crate::utils::structs::Context;
use warp::{Filter, Rejection, Reply};
pub fn routes(context: Context) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
let index = warp::path::end().map(|| warp::reply::html("<h1>SNEED!</h1>"));
index
.or(nip05_get(context.clone()))
.or(account_create(context.clone()))
}
pub fn nip05_get(context: Context) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
warp::get()
.and(warp::path(".well-known"))
.and(warp::path("nostr.json"))
.and(validate_query_filter::<UserQuery>())
.and(with_context(context))
.and_then(get_user)
}
pub fn account_create(
context: Context,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
warp::path("account")
.and(warp::post())
.and(validate_body_filter::<User>())
.and(with_context(context))
.and_then(create_account)
}
pub fn account_get(
context: Context,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
warp::path("account")
.and(warp::get())
.and(validate_body_filter::<Account>())
.and(with_context(context))
.and_then(get_account)
}
// pub fn account_update(
// context: Context,
// ) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
// warp::path("account")
// .and(warp::put())
// .and(warp::body::json::<Account>())
// .then(validate_body)
// .and(with_context(context))
// .and_then(update_account)
// }

src/usernames/util.rs Normal file

@@ -0,0 +1,3 @@
#[derive(Debug)]
pub struct InvalidParameter;
impl warp::reject::Reject for InvalidParameter {}

src/usernames/validators.rs Normal file

@@ -0,0 +1,30 @@
use super::dto::AccountPubkey;
use crate::utils::error::Error;
use nostr::prelude::FromPkStr;
use validator::{Validate, ValidationError};
pub async fn validate_account_pubkey_query(
account_pubkey: AccountPubkey,
) -> Result<AccountPubkey, Error>
where
AccountPubkey: Validate,
{
match account_pubkey.validate() {
Ok(_) => Ok(account_pubkey),
Err(e) => {
log::error!("AccountPubkey validation errors: {}", e);
Err(Error::validation_error(e))
}
}
}
pub fn validate_pubkey(value: &str) -> Result<(), ValidationError> {
if value.is_empty() {
return Err(ValidationError::new("Value is empty"));
}
match nostr::Keys::from_pk_str(value) {
Ok(_) => Ok(()),
Err(_) => Err(ValidationError::new("Unable to parse pk_str")),
}
}

src/utils/crypto.rs Normal file

@@ -0,0 +1,52 @@
use argon2::{
password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Argon2,
};
pub fn password_hash(password: String) -> String {
let password = password.as_bytes();
let salt = SaltString::generate(&mut OsRng);
// Argon2 with default params (Argon2id v19)
let argon2 = Argon2::default();
// Hash password to PHC string ($argon2id$v=19$...)
let password_hash = argon2.hash_password(password, &salt).unwrap().to_string();
password_hash
}
pub fn password_verify(password: String, password_hash: String) -> bool {
// Verify password against PHC string.
//
// NOTE: hash params from `parsed_hash` are used instead of what is configured in the
// `Argon2` instance.
let password = password.as_bytes();
let parsed_hash = PasswordHash::new(&password_hash).unwrap();
Argon2::default()
.verify_password(password, &parsed_hash)
.is_ok()
}
#[cfg(test)]
mod tests {
use super::{password_hash, password_verify};
#[test]
fn hash() {
let pass = "mysecretpassword".to_string();
let hash = password_hash(pass);
assert_ne!(hash, "".to_string());
}
#[test]
fn verify() {
let pass = "mysecretpassword".to_string();
let hash = password_hash(pass.clone());
let verification = password_verify(pass, hash);
assert!(verification);
}
}

src/utils/error.rs Normal file

@@ -0,0 +1,132 @@
use serde::{Deserialize, Serialize};
use serde_json;
use std::{
convert::From,
fmt::{self, Display},
};
use validator::ValidationErrors;
use warp::{http::StatusCode, reject::Reject};
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Error {
pub code: u16,
pub message: String,
/// Sneedstr version.
#[serde(skip_serializing_if = "Option::is_none")]
pub sneedstr_version: Option<u16>,
}
impl Error {
pub fn new(code: StatusCode, message: String) -> Self {
Self {
code: code.as_u16(),
message,
sneedstr_version: None,
}
}
pub fn from_anyhow_error(code: StatusCode, err: anyhow::Error) -> Self {
Self::new(code, err.to_string())
}
pub fn bad_request<S: Display>(msg: S) -> Self {
Self::new(StatusCode::BAD_REQUEST, msg.to_string())
}
pub fn internal_with_message<S: Display>(msg: S) -> Self {
Self::new(StatusCode::INTERNAL_SERVER_ERROR, msg.to_string())
}
pub fn validation_error(msg: ValidationErrors) -> Self {
let message =
serde_json::to_string(&msg.field_errors()).unwrap_or("Validation Error".to_string());
Self::new(StatusCode::BAD_REQUEST, message)
}
pub fn not_found<S: Display>(resource: &str, identifier: S, service_version: u16) -> Self {
Self::new(
StatusCode::NOT_FOUND,
format!("{} not found by {}", resource, identifier),
)
.sneedstr_version(service_version)
}
pub fn invalid_param<S: Display>(name: &str, value: S) -> Self {
Self::bad_request(format!("invalid parameter {}: {}", name, value))
}
pub fn invalid_request_body<S: Display>(msg: S) -> Self {
Self::bad_request(format!("invalid request body: {}", msg))
}
pub fn internal(err: anyhow::Error) -> Self {
Self::from_anyhow_error(StatusCode::INTERNAL_SERVER_ERROR, err)
}
pub fn status_code(&self) -> StatusCode {
StatusCode::from_u16(self.code).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)
}
pub fn sneedstr_version(mut self, service_version: u16) -> Self {
self.sneedstr_version = Some(service_version);
self
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}: {}", self.status_code(), &self.message)?;
if let Some(val) = &self.sneedstr_version {
            write!(f, "\nsneedstr version: {}", val)?;
}
Ok(())
}
}
impl Reject for Error {}
impl From<anyhow::Error> for Error {
fn from(e: anyhow::Error) -> Self {
Self::internal(e)
}
}
impl From<serde_json::error::Error> for Error {
fn from(err: serde_json::error::Error) -> Self {
Self::internal(err.into())
}
}
#[cfg(test)]
mod tests {
use super::Error;
use warp::http::StatusCode;
#[test]
fn test_to_string() {
let err = Error::new(StatusCode::BAD_REQUEST, "invalid address".to_owned());
assert_eq!(err.to_string(), "400 Bad Request: invalid address")
}
#[test]
fn test_from_anyhow_error_as_internal_error() {
let err = Error::from(anyhow::format_err!("hello"));
assert_eq!(err.to_string(), "500 Internal Server Error: hello")
}
#[test]
fn test_to_string_with_sneedstr_version() {
let err =
Error::new(StatusCode::BAD_REQUEST, "invalid address".to_owned()).sneedstr_version(123);
assert_eq!(
err.to_string(),
"400 Bad Request: invalid address\ndiem ledger version: 123"
)
}
#[test]
fn test_internal_error() {
let err = Error::internal(anyhow::format_err!("hello"));
assert_eq!(err.to_string(), "500 Internal Server Error: hello")
}
}
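// Editor's sketch (hypothetical handler): because `Error` implements
// `Reject`, it can bubble through warp's rejection machinery and be
// serialized back to JSON by the rejection handler.
#[allow(dead_code)]
async fn _reject_example() -> Result<String, warp::Rejection> {
    Err(warp::reject::custom(Error::bad_request(
        "missing `name` query parameter",
    )))
}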

8
src/utils/filter.rs Normal file
View file

@ -0,0 +1,8 @@
use super::structs::Context;
use warp::Filter;
pub fn with_context(
context: Context,
) -> impl Filter<Extract = (Context,), Error = std::convert::Infallible> + Clone {
warp::any().map(move || context.clone())
}
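// Editor's sketch (hypothetical `ping` route): `with_context` slots into a
// filter chain so the final handler receives the shared `Context` as an
// argument, mirroring the routes in src/usernames.
#[allow(dead_code)]
fn _ping(context: Context) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
    warp::path("ping")
        .and(with_context(context))
        .and_then(|_ctx: Context| async move {
            Ok::<_, warp::Rejection>(warp::reply::html("pong"))
        })
}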

7
src/utils/mod.rs Normal file
View file

@ -0,0 +1,7 @@
pub mod crypto;
pub mod error;
pub mod filter;
// mod nostr_filter_helpers;
pub mod rejection_handler;
pub mod response;
pub mod structs;

156
src/utils/nostr_filter_helpers.rs Normal file
View file

@ -0,0 +1,156 @@
use nostr::{Event, Filter, Kind, Tag};
fn ids_match(filter: &Filter, event: &Event) -> bool {
    if filter.ids.is_empty() {
        println!("[FILTER][IDS] skipped");
        return true;
    }
    // Compute the match once, then log and return it.
    let matched = filter.ids.iter().any(|id| id == &event.id.to_string());
    println!("[FILTER][IDS] matched: {:?}", matched);
    matched
}
fn kind_match(filter: &Filter, kind: Kind) -> bool {
    if filter.kinds.is_empty() {
        println!("[FILTER][KINDS] skipped");
        return true;
    }
    let matched = filter.kinds.iter().any(|k| k == &kind);
    println!("[FILTER][KIND] matched: {:?}", matched);
    matched
}
fn pubkeys_match(filter: &Filter, event: &Event) -> bool {
    if filter.pubkeys.is_empty() {
        println!("[FILTER][PUBKEYS] skipped");
        return true;
    }
    let matched = filter.pubkeys.iter().any(|pk| pk == &event.pubkey);
    println!("[FILTER][PUBKEYS] matched: {:?}", matched);
    matched
}
fn authors_match(filter: &Filter, event: &Event) -> bool {
    if filter.authors.is_empty() {
        println!("[FILTER][AUTHORS] skipped");
        return true;
    }
    let matched = filter
        .authors
        .iter()
        .any(|author| author == &event.pubkey.to_string());
    println!("[FILTER][AUTHORS] matched: {:?}", matched);
    matched
}
fn delegated_authors_match(filter: &Filter, event: &Event) -> bool {
    // Optional implementation
    // let delegated_authors_match = filter.authors.iter().any(|author| {
    //     event.tags.iter().any(|tag| match tag {
    //         Tag::Delegation { delegator_pk, .. } => filter
    //             .authors
    //             .iter()
    //             .any(|author| author == &delegator_pk.to_string()),
    //         _ => false,
    //     })
    // });
    let matched = event.tags.iter().any(|tag| match tag {
        Tag::Delegation { delegator_pk, .. } => filter
            .authors
            .iter()
            .any(|author| author == &delegator_pk.to_string()),
        // A non-delegation tag is not a match; the previous `_ => true` arm
        // made any tagged event match regardless of its delegator.
        _ => false,
    });
    println!("[FILTER][DELEGATED_AUTHORS] matched: {:?}", matched);
    matched
}
fn tag_match(filter: &Filter, event: &Event) -> bool {
    let matched = filter.generic_tags.iter().any(|(key, value)| {
        event.tags.iter().any(|tag| {
            let kv = tag.as_vec();
            // Guard against tags with fewer than two entries before indexing.
            kv.len() >= 2 && key.to_string() == kv[0] && value.iter().any(|vv| vv == &kv[1])
        })
    });
    println!("[FILTER][TAG] matched: {:?}", matched);
    // TODO: Fix the generic tag check. Its result is logged but deliberately
    // ignored for now, so tag filters never exclude an event.
    true
}
pub fn interested_in_event(filter: &Filter, event: &Event) -> bool {
ids_match(filter, event)
        && filter.since.map_or_else(
            // `map_or_else` keeps this default branch lazy; with `map_or` the
            // block ran (and logged) even when `since` was set.
            || {
                println!("[FILTER][SINCE][default] matched: {:?}", true);
                true
            },
            |t| {
                println!("[FILTER][SINCE] matched: {:?}", event.created_at >= t);
                event.created_at >= t
            },
        )
        && filter.until.map_or_else(
            || {
                println!("[FILTER][UNTIL][default] matched: {:?}", true);
                true
            },
            |t| {
                println!("[FILTER][UNTIL] matched: {:?}", event.created_at <= t);
                event.created_at <= t
            },
        )
&& kind_match(filter, event.kind)
&& (pubkeys_match(filter, event)
|| authors_match(filter, event)
|| delegated_authors_match(filter, event))
&& tag_match(filter, event)
}
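// Editor's sketch: an empty `Filter` matches any event, because every clause
// above treats an unset field as "skipped".
#[allow(dead_code)]
fn _matches_everything(event: &Event) -> bool {
    interested_in_event(&Filter::new(), event)
}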

45
src/utils/rejection_handler.rs Normal file
View file

@ -0,0 +1,45 @@
use super::error::Error;
use std::convert::Infallible;
use warp::{
body::BodyDeserializeError,
cors::CorsForbidden,
http::StatusCode,
reject::{LengthRequired, MethodNotAllowed, PayloadTooLarge, UnsupportedMediaType},
reply, Rejection, Reply,
};
pub async fn handle_rejection(err: Rejection) -> Result<impl Reply, Infallible> {
let code;
let body;
if err.is_not_found() {
code = StatusCode::NOT_FOUND;
body = reply::json(&Error::new(code, "Not Found".to_owned()));
} else if let Some(error) = err.find::<Error>() {
code = error.status_code();
body = reply::json(error);
} else if let Some(cause) = err.find::<CorsForbidden>() {
code = StatusCode::FORBIDDEN;
body = reply::json(&Error::new(code, cause.to_string()));
} else if let Some(cause) = err.find::<BodyDeserializeError>() {
code = StatusCode::BAD_REQUEST;
body = reply::json(&Error::new(code, cause.to_string()));
} else if let Some(cause) = err.find::<LengthRequired>() {
code = StatusCode::LENGTH_REQUIRED;
body = reply::json(&Error::new(code, cause.to_string()));
} else if let Some(cause) = err.find::<PayloadTooLarge>() {
code = StatusCode::PAYLOAD_TOO_LARGE;
body = reply::json(&Error::new(code, cause.to_string()));
} else if let Some(cause) = err.find::<UnsupportedMediaType>() {
code = StatusCode::UNSUPPORTED_MEDIA_TYPE;
body = reply::json(&Error::new(code, cause.to_string()));
} else if let Some(cause) = err.find::<MethodNotAllowed>() {
code = StatusCode::METHOD_NOT_ALLOWED;
body = reply::json(&Error::new(code, cause.to_string()));
} else {
code = StatusCode::INTERNAL_SERVER_ERROR;
body = reply::json(&Error::new(code, format!("unexpected error: {:?}", err)));
}
Ok(reply::with_status(body, code))
}
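// Editor's sketch (assumes a `routes` composition function like the one in
// src/usernames): recovery is installed once, at the edge of the route tree,
// so every rejection is rendered through the JSON error shape defined in
// src/utils/error.rs.
//
// let app = routes(context).recover(handle_rejection);
// warp::serve(app).run(([127, 0, 0, 1], 8080)).await;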

28
src/utils/response.rs Normal file
View file

@ -0,0 +1,28 @@
use super::error::Error;
use anyhow::Result;
use serde::Serialize;
use warp::http::header::{HeaderValue, CONTENT_TYPE};
pub struct Response {
pub body: Vec<u8>,
}
impl Response {
pub fn new<T: Serialize>(body: &T) -> Result<Self, Error> {
Ok(Self {
body: serde_json::to_vec(body)?,
})
}
}
impl warp::Reply for Response {
fn into_response(self) -> warp::reply::Response {
let mut res = warp::reply::Response::new(self.body.into());
let headers = res.headers_mut();
headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
res
}
}
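// Editor's sketch (hypothetical handler): build the reply through
// `Response::new` and convert a serialization failure into a rejection.
#[allow(dead_code)]
async fn _json_handler() -> Result<Response, warp::Rejection> {
    Response::new(&vec!["ok"]).map_err(warp::reject::custom)
}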

135
src/utils/structs.rs Normal file
View file

@ -0,0 +1,135 @@
use super::error::Error;
// use super::nostr_filter_helpers;
use crate::PubSub;
use nostr::{Event, Filter, SubscriptionId};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::mpsc;
use uuid::Uuid;
use warp::ws::Message;
const MAX_SUBSCRIPTIONS: usize = 256;
const MAX_SUBSCRIPTION_ID_LEN: usize = 256;
#[derive(serde::Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Subscription {
pub id: SubscriptionId,
pub filters: Vec<Filter>,
}
impl Subscription {
pub fn new(id: SubscriptionId, filters: Vec<Filter>) -> Self {
Self { id, filters }
}
pub fn get_id(&self) -> String {
self.id.to_string()
}
pub fn needs_historical_events(&self) -> bool {
self.filters.iter().any(|f| f.limit != Some(0))
}
pub fn interested_in_event(&self, event: &Event) -> bool {
log::info!("[Subscription] Checking if client is interested in the new event");
for filter in &self.filters {
if filter.match_event(event) {
log::info!("[Subscription] found filter that matches the event");
return true;
}
}
false
}
}
#[derive(Debug, Clone)]
pub struct Client {
client_ip_addr: String,
pub client_id: Uuid,
pub client_connection: Option<mpsc::UnboundedSender<Result<Message, Error>>>,
pub subscriptions: HashMap<String, Subscription>,
max_subs: usize,
}
impl Client {
pub fn new(client_ip_addr: String) -> Self {
Self {
client_ip_addr,
client_id: Uuid::new_v4(),
client_connection: None,
subscriptions: HashMap::new(),
max_subs: MAX_SUBSCRIPTIONS,
}
}
pub fn ip(&self) -> &str {
&self.client_ip_addr
}
pub fn subscribe(&mut self, subscription: Subscription) -> Result<(), Error> {
let k = subscription.get_id();
let sub_id_len = k.len();
if sub_id_len > MAX_SUBSCRIPTION_ID_LEN {
log::debug!(
"Ignoring sub request with excessive length: ({})",
sub_id_len
);
return Err(Error::bad_request("sub request is too long"));
}
        if self.subscriptions.contains_key(&k) {
            // `insert` replaces the existing entry, so no explicit `remove` is needed.
            self.subscriptions.insert(k, subscription.clone());
            log::debug!(
                "replaced existing subscription (cid: {}, sub: {:?})",
                self.client_id,
                subscription.get_id()
            );
            return Ok(());
        }
if self.subscriptions.len() >= self.max_subs {
return Err(Error::bad_request("max subs exceeded"));
}
// Insert subscription
self.subscriptions.insert(k, subscription);
log::debug!(
"registered new subscription, currently have {} active subs (cid: {})",
self.subscriptions.len(),
self.client_id
);
Ok(())
}
pub fn unsubscribe(&mut self, subscription_id: SubscriptionId) {
self.subscriptions.remove(&subscription_id.to_string());
log::debug!(
"removed subscription, currently have {} active subs (cid: {})",
self.subscriptions.len(),
self.client_id
);
}
}
#[derive(Debug, Clone)]
pub struct Context {
pub pubsub: Arc<PubSub>,
}
impl Default for Context {
fn default() -> Self {
panic!("Use Context::new() to initialize Contexr");
}
}
impl Context {
pub fn new() -> Self {
let pubsub = Arc::new(PubSub::new());
Self { pubsub }
}
}
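// Editor's sketch: how a connection-level `Client` tracks subscriptions;
// re-subscribing under an existing id replaces the earlier filters instead of
// counting against MAX_SUBSCRIPTIONS.
#[allow(dead_code)]
fn _client_subscription_example() {
    let mut client = Client::new("127.0.0.1".to_string());
    let sub = Subscription::new(SubscriptionId::new("feed"), vec![Filter::new()]);
    client.subscribe(sub).expect("first subscription fits");
    client.unsubscribe(SubscriptionId::new("feed"));
}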