Add pubkey ban support

Temporarily use nostr-db as a db backend due to a breaking API change in the library
Tony Klink 2024-01-23 10:30:47 -06:00
parent a178cdee05
commit 0bbce25d39
Signed by: klink
GPG key ID: 85175567C4D19231
12 changed files with 561 additions and 917 deletions

Cargo.lock (generated): 715 changes; diff suppressed because it is too large.

@@ -17,15 +17,16 @@ futures-util = "0.3.28"
 rustls = "0.21"
 anyhow = "1.0"
 sled = "0.34.7"
-sqlx = { version = "0.7", features = [ "runtime-tokio", "tls-rustls", "sqlite", "migrate", "macros"] }
+# sqlx = { version = "0.7", features = [ "runtime-tokio", "tls-rustls", "sqlite", "migrate", "macros"] }
 flexi_logger = { version = "0.27.3", features = [ "async", "compress" ] }
 lazy_static = "1.4.0"
 log = "0.4"
-nostr = "0.26.0"
+nostr = "0.27.0"
+nostr-sqlite = "0.27.0"
 regex = "1.9.5"
 sailfish = "0.7.0"
 sea-query = { version = "0.30.4", features = ["backend-sqlite", "thread-safe"] }
-sea-query-binder = { version = "0.5.0", features = ["sqlx-sqlite"] }
+# sea-query-binder = { version = "0.5.0", features = ["sqlx-sqlite"] }
 serde = "1.0"
 serde_json = "1.0"
 thiserror = "1.0.48"


@@ -1,5 +1,6 @@
 use crate::{
     noose::user::{User, UserRow},
+    noose::sled::BanInfo,
     utils::{error::Error, structs::Subscription},
 };
 use nostr::secp256k1::XOnlyPublicKey;
@@ -11,6 +12,7 @@ pub mod channels {
     pub static MSG_NIP05: &str = "MSG_NIP05";
     pub static MSG_RELAY: &str = "MSG_RELAY";
     pub static MSG_PIPELINE: &str = "MSG_PIPELINE";
+    pub static MSG_SLED: &str = "MSG_SLED";
 }

 #[derive(Debug, Clone, PartialEq)]
@@ -19,6 +21,7 @@ pub enum Command {
     DbReqWriteEvent(/* client_id */ uuid::Uuid, Box<nostr::Event>),
     DbReqFindEvent(/* client_id*/ uuid::Uuid, Subscription),
     DbReqDeleteEvents(/* client_id*/ uuid::Uuid, Box<nostr::Event>),
+    DbReqEventCounts(/* client_id*/ uuid::Uuid, Subscription),

     // Old messages
     DbReqInsertUser(UserRow),
@@ -27,15 +30,16 @@ pub enum Command {
     DbReqGetAccount(String),
     DbReqClear,
     // DbResponse
-    DbResRelayMessage(
+    DbResRelayMessages(
         /* client_id*/ uuid::Uuid,
-        /* Vec<RelayMessage::Event> */ Vec<String>,
+        /* Vec<RelayMessage::Event> */ Vec<nostr::RelayMessage>,
     ),
     DbResInfo,
     DbResOk,
     DbResOkWithStatus(/* client_id */ uuid::Uuid, nostr::RelayMessage),
     DbResAccount, // TODO: Add Account DTO as a param
     DbResUser(UserRow),
+    DbResEventCounts(/* client_id */ uuid::Uuid, nostr::RelayMessage),

     // Event Pipeline
     PipelineReqEvent(/* client_id */ uuid::Uuid, Box<nostr::Event>),
     PipelineResRelayMessageOk(/* client_id */ uuid::Uuid, nostr::RelayMessage),
@@ -43,6 +47,14 @@ pub enum Command {
     PipelineResOk,
     // Subscription Errors
     ClientSubscriptionError(/* error message */ String),
+    // Sled
+    SledReqBanUser(Box<BanInfo>),
+    SledReqBanInfo(/* pubkey */ String),
+    SledReqUnbanUser(/* pubkey */ String),
+    SledReqGetBans,
+    SledResBan(Option<BanInfo>),
+    SledResBans(Vec<BanInfo>),
+    SledResSuccess(bool),
     // Other
     Str(String),
     ServiceError(Error),

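The Sled* variants added above form request/response pairs over the same bus the Db* commands use: SledReqBanUser and SledReqUnbanUser are answered with SledResSuccess(bool), SledReqBanInfo with SledResBan(Option<BanInfo>), and SledReqGetBans with SledResBans(Vec<BanInfo>). A minimal sketch of driving a ban from another service, assuming the caller holds an Arc<PubSub> and, like the relay, listens on the channel it names as source; the helper function itself is illustrative and not part of this commit:

```rust
use crate::bussy::{channels, Command, Message, PubSub};
use crate::noose::sled::BanInfo;
use std::sync::Arc;

// Sketch only: publish a ban request on MSG_NOOSE and wait for the SledDb reply,
// which comes back on the channel named in `source` and is tagged MSG_SLED.
async fn ban_pubkey(pubsub: Arc<PubSub>, pubkey: String, reason: String) -> bool {
    let mut subscriber = pubsub.subscribe(channels::MSG_RELAY).await;

    pubsub
        .publish(
            channels::MSG_NOOSE,
            Message {
                source: channels::MSG_RELAY,
                content: Command::SledReqBanUser(Box::new(BanInfo { pubkey, reason })),
            },
        )
        .await;

    while let Ok(message) = subscriber.recv().await {
        if let Command::SledResSuccess(status) = message.content {
            return status;
        }
    }
    false
}
```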

@@ -9,11 +9,9 @@ use std::sync::Arc;
 pub trait Noose: Send + Sync {
     async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error>;

-    async fn migration_up(&self);
-
     async fn write_event(&self, event: Box<Event>) -> Result<RelayMessage, Error>;

-    async fn delete_events(&self, event_ids: Box<Event>) -> Result<RelayMessage, Error>;
-
-    async fn find_event(&self, subscription: Subscription) -> Result<Vec<String>, Error>;
+    async fn find_event(&self, subscription: Subscription) -> Result<Vec<RelayMessage>, Error>;
+
+    async fn counts(&self, subscription: Subscription) -> Result<RelayMessage, Error>;
 }


@@ -1,13 +1,12 @@
 use crate::utils::structs::Context;
-use tokio::runtime;
 use db::Noose;
 use pipeline::Pipeline;
+use tokio::runtime;

 pub mod db;
+mod nostr_db;
 pub mod pipeline;
-// mod sled;
-mod sqlite;
+pub mod sled;
+// mod sqlite;
 pub mod user;

 pub fn start(context: Context) {
@@ -16,19 +15,32 @@ pub fn start(context: Context) {
     rt.block_on(async move {
         let pipeline_pubsub = context.pubsub.clone();
         let pipeline_config = context.config.clone();
+        let db_config = context.config.clone();
         let db_pubsub = context.pubsub.clone();
+        let sled_pubsub = context.pubsub.clone();

         let pipeline_handle = tokio::task::spawn(async move {
             let mut pipeline = Pipeline::new(pipeline_pubsub, pipeline_config);
             pipeline.start().await.unwrap();
         });

-        let sqlite_writer_handle = tokio::task::spawn(async move {
-            let mut db_writer = sqlite::SqliteDb::new().await;
+        let sled_handle = tokio::task::spawn(async move {
+            let mut sled_writer = sled::SledDb::new();
+            sled_writer.start(sled_pubsub).await.unwrap();
+        });
+
+        let nostr_db_writer_handle = tokio::task::spawn(async move {
+            let mut db_writer = nostr_db::NostrDb::new(db_config).await;
             db_writer.start(db_pubsub).await.unwrap();
         });

-        sqlite_writer_handle.await.unwrap();
+        // let sqlite_writer_handle = tokio::task::spawn(async move {
+        //     let mut db_writer = sqlite::SqliteDb::new().await;
+        //     db_writer.start(db_pubsub).await.unwrap();
+        // });
+        // sqlite_writer_handle.await.unwrap();
+
         pipeline_handle.await.unwrap();
     });
 }

src/noose/nostr_db.rs (new file, 111 lines)

@@ -0,0 +1,111 @@
use crate::{
    bussy::{channels, Command, Message, PubSub},
    noose::Noose,
    utils::{config::Config, error::Error, structs::Subscription},
};
use nostr_sqlite::{database::NostrDatabase, SQLiteDatabase};
use std::sync::Arc;

pub struct NostrDb {
    db: SQLiteDatabase,
}

impl NostrDb {
    pub async fn new(config: Arc<Config>) -> Self {
        let db_path = config.get_db_path();
        if let Ok(db) = SQLiteDatabase::open(db_path).await {
            return Self { db };
        }
        panic!("[NostrDb] Failed to initialize database");
    }
}

impl Noose for NostrDb {
    async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error> {
        let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await;

        while let Ok(message) = subscriber.recv().await {
            log::info!("[Noose] received message: {:?}", message);
            let command = match message.content {
                Command::DbReqWriteEvent(client_id, event) => match self.write_event(event).await {
                    Ok(status) => Command::DbResOkWithStatus(client_id, status),
                    Err(e) => Command::ServiceError(e),
                },
                Command::DbReqFindEvent(client_id, subscription) => {
                    match self.find_event(subscription).await {
                        Ok(relay_messages) => {
                            Command::DbResRelayMessages(client_id, relay_messages)
                        }
                        Err(e) => Command::ServiceError(e),
                    }
                }
                Command::DbReqEventCounts(client_id, subscription) => {
                    match self.counts(subscription).await {
                        Ok(relay_message) => Command::DbResEventCounts(client_id, relay_message),
                        Err(e) => Command::ServiceError(e),
                    }
                }
                _ => Command::Noop,
            };

            if command != Command::Noop {
                let channel = message.source;
                let message = Message {
                    source: channels::MSG_NOOSE,
                    content: command,
                };
                log::info!(
                    "[Noose] publishing new message: {:?} to channel {}",
                    message,
                    channel
                );
                pubsub.publish(channel, message).await;
            }
        }

        Ok(())
    }

    async fn write_event(&self, event: Box<nostr::Event>) -> Result<nostr::RelayMessage, Error> {
        // TODO: Maybe do event validation and admin deletions here
        match self.db.save_event(&event).await {
            Ok(status) => {
                let relay_message = nostr::RelayMessage::ok(event.id, status, "");
                Ok(relay_message)
            }
            Err(err) => Err(Error::bad_request(err.to_string())),
        }
    }

    async fn find_event(
        &self,
        subscription: Subscription,
    ) -> Result<Vec<nostr::RelayMessage>, Error> {
        match self
            .db
            .query(subscription.filters, nostr_sqlite::database::Order::Desc)
            .await
        {
            Ok(events) => {
                let relay_messages = events
                    .into_iter()
                    .map(|event| nostr::RelayMessage::event(subscription.id.clone(), event))
                    .collect();
                Ok(relay_messages)
            }
            Err(err) => Err(Error::bad_request(err.to_string())),
        }
    }

    async fn counts(&self, subscription: Subscription) -> Result<nostr::RelayMessage, Error> {
        match self.db.count(subscription.filters).await {
            Ok(counts) => {
                let relay_message = nostr::RelayMessage::count(subscription.id, counts);
                Ok(relay_message)
            }
            Err(err) => Err(Error::internal_with_message(err.to_string())),
        }
    }
}

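NostrDb is a thin adapter over nostr-sqlite: write_event forwards to save_event, find_event to query, and counts to count, with the results wrapped in RelayMessage values. A standalone sketch of the same underlying calls, assuming a writable path; the path, the function name, and the anyhow error handling are illustrative, and the function would be driven by the project's tokio runtime:

```rust
use nostr_sqlite::{
    database::{NostrDatabase, Order},
    SQLiteDatabase,
};
use std::path::PathBuf;

// Sketch only: exercises the nostr-sqlite calls that NostrDb wraps.
async fn smoke_test() -> anyhow::Result<()> {
    let db = SQLiteDatabase::open(PathBuf::from("/tmp/zhitnost.db")).await?;

    let keys = nostr::Keys::generate();
    let event = nostr::EventBuilder::text_note("hello", []).to_event(&keys)?;
    let saved = db.save_event(&event).await?; // the status used for the OK relay message

    let filter = nostr::Filter::new().authors(vec![keys.public_key()]);
    let found = db.query(vec![filter.clone()], Order::Desc).await?;
    let count = db.count(vec![filter]).await?;
    println!("saved: {}, found: {}, count: {}", saved, found.len(), count);
    Ok(())
}
```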

@@ -1,234 +1,138 @@
-use super::db::Noose;
-use crate::bussy::{channels, Command, Message, PubSub};
-use crate::utils::error::Error;
-use crate::utils::structs::Subscription;
-use async_trait::async_trait;
-use nostr::Event;
-use serde::Serialize;
-use std::sync::Arc;
-
-use super::user::{User, UserRow};
-
-// Db Interface
-pub struct SledDb {
-    db: sled::Db,
-    events: sled::Tree,
-    nip05s: sled::Tree,
-    pub users: sled::Tree,
-    index: sled::Db,
-}
-
-impl SledDb {
-    pub fn new() -> Self {
-        let db = sled::open("/tmp/sled_db").unwrap();
-        let events = db.open_tree("events").unwrap();
-        let nip05s = db.open_tree("identifiers").unwrap();
-        let accounts = db.open_tree("accounts").unwrap();
-        let index = sled::open("/tmp/sled_index").unwrap();
-
-        Self {
-            db,
-            events,
-            nip05s,
-            users: accounts,
-            index,
-        }
-    }
-
-    fn clear_db(&self) -> Result<(), sled::Error> {
-        self.db.clear()
-    }
-
-    fn clear_index(&self) -> Result<(), sled::Error> {
-        self.index.clear()
-    }
-
-    async fn insert_user(&self, user: UserRow) -> Result<(), Error> {
-        let pubkey = user.pubkey.clone();
-        let username = user.username.clone();
-
-        if let Ok(Some(_)) = self.nip05s.get(&username) {
-            return Err(Error::internal_with_message("User already exists"));
-        }
-
-        let mut user_buff = flexbuffers::FlexbufferSerializer::new();
-        user.serialize(&mut user_buff).unwrap();
-
-        self.nip05s.insert(&username, user_buff.view()).unwrap();
-
-        let prefix = "nip05:";
-        let key = format!("{}{}", prefix, pubkey);
-        self.index.insert(key, username.as_bytes()).unwrap();
-
-        Ok(())
-    }
-
-    async fn get_user(&self, user: User) -> Result<UserRow, Error> {
-        let mut user_row = None;
-
-        if let Some(username) = user.name {
-            if let Ok(Some(buff)) = self.nip05s.get(username) {
-                let b = flexbuffers::from_slice::<UserRow>(&buff).unwrap();
-                user_row = Some(b);
-            }
-        } else if let Some(pubkey) = user.pubkey {
-            let prefix = "nip05:";
-            let reference = format!("{}{}", prefix, pubkey);
-            if let Ok(Some(row)) = self.index.get(reference) {
-                let key = String::from_utf8(row.to_vec()).unwrap();
-                if let Ok(Some(buff)) = self.nip05s.get(key) {
-                    let b = flexbuffers::from_slice::<UserRow>(&buff).unwrap();
-                    user_row = Some(b);
-                }
-            }
-        }
-
-        match user_row {
-            Some(user) => Ok(user),
-            None => Err(Error::internal_with_message("User not found")),
-        }
-    }
-}
-
-#[async_trait]
-impl Noose for SledDb {
-    async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error> {
-        let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await;
-
-        while let Ok(message) = subscriber.recv().await {
-            log::info!("noose subscriber received: {:?}", message);
-            let command = match message.content {
-                Command::DbReqInsertUser(user) => match self.insert_user(user).await {
-                    Ok(_) => Command::DbResOk,
-                    Err(e) => Command::ServiceError(e),
-                },
-                Command::DbReqGetUser(user) => match self.get_user(user).await {
-                    Ok(user) => Command::DbResUser(user),
-                    Err(e) => Command::ServiceError(e),
-                },
-                Command::DbReqWriteEvent(event) => match self.write_event(event).await {
-                    Ok(_) => Command::DbResOk,
-                    Err(e) => Command::ServiceError(e),
-                },
-                _ => Command::Noop,
-            };
-
-            if command != Command::Noop {
-                log::info!("Publishing new message");
-                let channel = message.source;
-                pubsub
-                    .publish(
-                        channel,
-                        Message {
-                            source: channels::MSG_NOOSE,
-                            content: command,
-                        },
-                    )
-                    .await;
-            }
-        }
-
-        Ok(())
-    }
-
-    async fn migration_up(&self) {}
-
-    async fn write_event(&self, event: Box<Event>) -> Result<String, Error> {
-        let mut event_buff = flexbuffers::FlexbufferSerializer::new();
-        event.serialize(&mut event_buff).unwrap();
-
-        self.events.insert(event.id, event_buff.view()).unwrap();
-
-        {
-            // Timestamp
-            let key = format!("created_at:{}|#e:{}", event.created_at, event.id);
-            self.index.insert(key, event.id.as_bytes()).unwrap();
-        }
-        {
-            // Author, pubkeys #p
-            let key = format!("#author:{}|#e:{}", event.pubkey, event.id);
-            self.index.insert(key, event.id.as_bytes()).unwrap();
-            // self.index.scan_prefix(
-        }
-        {
-            // Kinds
-            let key = format!("#k:{}|#e:{}", event.kind, event.id);
-            self.index.insert(key, event.id.as_bytes()).unwrap();
-            // self.index.scan_prefix(
-        }
-        {
-            // Tags
-            event.tags.iter().for_each(|tag| {
-                if let Some(key) = match tag {
-                    // #e tag
-                    nostr::Tag::Event(event_id, _, _) => Some(format!("#e:{}", event_id)),
-                    // #p tag
-                    nostr::Tag::PubKey(pubkey, _) => Some(format!("#p:{}|#e:{}", pubkey, event.id)),
-                    // #t tag
-                    nostr::Tag::Hashtag(hashtag) => Some(format!("#t:{}|#e:{}", hashtag, event.id)),
-                    // #a tag
-                    nostr::Tag::A {
-                        kind,
-                        public_key,
-                        identifier,
-                        relay_url,
-                    } => Some(format!(
-                        "#a:kind:{}|#a:pubkey:{}#a:identifier:{}|#e:{}",
-                        kind, public_key, identifier, event.id
-                    )),
-                    _ => None,
-                } {
-                    self.index.insert(key, event.id.as_bytes()).unwrap();
-                }
-            });
-            // let key = format!("#t:{}|#e:{}", event.kind, event.id);
-            // self.index.insert(key, event.id.as_bytes()).unwrap();
-            // self.index.scan_prefix(
-        }
-
-        let message = format!("[\"OK\", \"{}\", true, \"\"]", event.id.to_string());
-
-        Ok(message)
-    }
-
-    async fn find_event(&self, subscription: Subscription) -> Result<Vec<String>, Error> {
-        todo!()
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::SledDb;
-    use crate::{
-        bussy::PubSub,
-        noose::user::{User, UserRow},
-    };
-    use std::sync::Arc;
-
-    #[tokio::test]
-    async fn get_db_names() {
-        let pubsub = Arc::new(PubSub::new());
-        let db = SledDb::new();
-
-        let pk = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string();
-        let username = "klink".to_string();
-        let user = UserRow::new(pk, username, false);
-
-        let result = db.insert_user(user).await;
-
-        let pubkey = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string();
-        let username = "klink".to_string();
-        let user = User {
-            name: None,
-            pubkey: Some(pubkey),
-        };
-
-        let user = db.get_user(user).await;
-
-        db.clear_db().unwrap();
-        db.clear_index().unwrap();
-    }
-}
+use std::sync::Arc;
+
+use crate::bussy::{channels, Command, Message, PubSub};
+use crate::utils::error::Error;
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct BanInfo {
+    pub pubkey: String,
+    pub reason: String,
+}
+
+// Db Interface
+pub struct SledDb {
+    db: sled::Db,
+    banned_pubkeys: sled::Tree,
+}
+
+impl SledDb {
+    pub fn new() -> Self {
+        let db = sled::open("/tmp/sled_db").unwrap();
+        let banned_pubkeys = db.open_tree("banned_pubkeys").unwrap();
+
+        Self {
+            db,
+            banned_pubkeys,
+        }
+    }
+
+    pub async fn start(&mut self, pubsub: Arc<PubSub>) -> Result<(), Error> {
+        let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await;
+
+        while let Ok(message) = subscriber.recv().await {
+            log::info!("[Noose] received message: {:?}", message);
+            let command = match message.content {
+                Command::SledReqBanUser(ban_info) => match self.ban_user(ban_info).await {
+                    Ok(status) => Command::SledResSuccess(status),
+                    Err(e) => Command::ServiceError(e),
+                },
+                Command::SledReqUnbanUser(pubkey) => match self.unban_user(&pubkey).await {
+                    Ok(status) => Command::SledResSuccess(status),
+                    Err(e) => Command::ServiceError(e),
+                },
+                Command::SledReqGetBans => match self.get_bans().await {
+                    Ok(bans) => Command::SledResBans(bans),
+                    Err(e) => Command::ServiceError(e),
+                },
+                Command::SledReqBanInfo(pubkey) => match self.get_ban_by_pubkey(&pubkey).await {
+                    Ok(ban_info) => Command::SledResBan(ban_info),
+                    Err(e) => Command::ServiceError(e),
+                },
+                _ => Command::Noop,
+            };
+
+            if command != Command::Noop {
+                let channel = message.source;
+                let message = Message {
+                    source: channels::MSG_SLED,
+                    content: command,
+                };
+                log::info!(
+                    "[Sled] publishing new message: {:?} to channel {}",
+                    message,
+                    channel
+                );
+                pubsub.publish(channel, message).await;
+            }
+        }
+
+        Ok(())
+    }
+
+    fn clear_db(&self) -> Result<(), sled::Error> {
+        self.db.clear()
+    }
+
+    async fn ban_user(&self, ban_info: Box<BanInfo>) -> Result<bool, Error> {
+        if let Ok(Some(_)) = self
+            .banned_pubkeys
+            .insert(ban_info.pubkey, ban_info.reason.as_bytes())
+        {
+            return Ok(true);
+        }
+        Ok(false)
+    }
+
+    fn is_banned(&self, pubkey: &String) -> bool {
+        if let Ok(Some(banned)) = self.banned_pubkeys.get(pubkey) {
+            return true;
+        }
+        false
+    }
+
+    async fn unban_user(&self, pubkey: &String) -> Result<bool, Error> {
+        if self.is_banned(pubkey) {
+            self.banned_pubkeys.remove(pubkey).unwrap();
+            return Ok(true);
+        }
+        Ok(false)
+    }
+
+    async fn get_bans(&self) -> Result<Vec<BanInfo>, Error> {
+        let bans: Vec<BanInfo> = self
+            .banned_pubkeys
+            .iter()
+            .filter_map(|row| {
+                if let Ok((k, v)) = row {
+                    let ban_info = BanInfo {
+                        pubkey: String::from_utf8(k.to_vec()).unwrap(),
+                        reason: String::from_utf8(v.to_vec()).unwrap(),
+                    };
+
+                    Some(ban_info)
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        Ok(bans)
+    }
+
+    async fn get_ban_by_pubkey(&self, pubkey: &String) -> Result<Option<BanInfo>, Error> {
+        if self.is_banned(pubkey) {
+            if let Ok(Some(reason)) = self.banned_pubkeys.get(pubkey) {
+                let ban_info = BanInfo {
+                    pubkey: pubkey.to_owned(),
+                    reason: String::from_utf8(reason.to_vec()).unwrap(),
+                };
+                return Ok(Some(ban_info));
+            }
+            return Ok(None);
+        }
+        Ok(None)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+}

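ban_user, unban_user, get_bans, and get_ban_by_pubkey are private to sled.rs, so the natural place to exercise them is this module's own #[cfg(test)] mod tests, in the spirit of the test the rewrite removed. A sketch (note that ban_user forwards sled's insert return value, so it reports true only when an existing ban was overwritten):

```rust
// Sketch only: would live inside sled.rs's #[cfg(test)] mod tests,
// alongside `use super::{BanInfo, SledDb};`.
#[tokio::test]
async fn ban_roundtrip() {
    let db = SledDb::new();
    let pubkey = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string();

    // For a fresh key, sled's insert returns None, so this reports false.
    let _overwrote = db
        .ban_user(Box::new(BanInfo {
            pubkey: pubkey.clone(),
            reason: "spam".to_string(),
        }))
        .await
        .unwrap();

    // The ban is visible and carries its reason.
    let ban = db.get_ban_by_pubkey(&pubkey).await.unwrap();
    assert_eq!(ban.map(|b| b.reason), Some("spam".to_string()));

    // Unbanning a banned pubkey succeeds and removes the entry.
    assert!(db.unban_user(&pubkey).await.unwrap());
    assert!(db.get_ban_by_pubkey(&pubkey).await.unwrap().is_none());
}
```

Because the database is opened at the fixed /tmp/sled_db path, a test like this shares state with any running relay on the same machine.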

@@ -67,17 +67,26 @@ pub async fn client_connection(
                }
                // }
            }
-            crate::bussy::Command::DbResRelayMessage(client_id, events) => {
+            crate::bussy::Command::DbResRelayMessages(client_id, relay_messages) => {
                if client.client_id == client_id {
                    if let Some(sender) = &client.client_connection {
                        if !sender.is_closed() {
-                            for event in events {
-                                sender.send(Ok(Message::text(event))).unwrap();
+                            for message in relay_messages {
+                                sender.send(Ok(Message::text(message.as_json()))).unwrap();
                            }
                        }
                    }
                }
            }
+            crate::bussy::Command::DbResEventCounts(client_id, relay_message) => {
+                if client.client_id == client_id {
+                    if let Some(sender) = &client.client_connection {
+                        if !sender.is_closed() {
+                            sender.send(Ok(Message::text(relay_message.as_json()))).unwrap();
+                        }
+                    }
+                }
+            }
            crate::bussy::Command::DbResOkWithStatus(client_id, status) => {
                if client.client_id == client_id {
                    if let Some(sender) = &client.client_connection {
@@ -177,7 +186,7 @@ async fn socket_on_message(context: &Context, client: &mut Client, msg: Message)
         Err(e) => {
             log::error!("error while parsing client message request: {}", e);

-            let response = nostr::RelayMessage::new_notice("Invalid message");
+            let response = nostr::RelayMessage::notice("Invalid message");
             let message = Message::text(response.as_json());

             send(client, message);
@@ -212,7 +221,7 @@ async fn handle_msg(context: &Context, client: &mut Client, client_message: Clie
         ClientMessage::Count {
             subscription_id,
             filters,
-        } => handle_count(client, subscription_id, filters).await,
+        } => handle_count(context, client, subscription_id, filters).await,
         ClientMessage::Close(subscription_id) => handle_close(client, subscription_id).await,
         ClientMessage::Auth(event) => handle_auth(client, event).await,
         _ => (),
@@ -224,7 +233,7 @@ async fn handle_event(context: &Context, client: &Client, event: Box<Event>) {
     if let Err(err) = event.verify() {
         let relay_message =
-            nostr::RelayMessage::new_ok(event.id, false, "Failed to verify event signature");
+            nostr::RelayMessage::ok(event.id, false, "Failed to verify event signature");
         let message = crate::bussy::Message {
             source: channels::MSG_RELAY,
             content: crate::bussy::Command::PipelineResRelayMessageOk(
@@ -263,6 +272,7 @@ async fn handle_req(
         client.ip(),
         &subscription_error.message
     );
+
     let message = format!(
         "[\"CLOSED\", \"{}\", \"{}\"]",
         subscription_id, subscription_error.message
@@ -282,6 +292,7 @@ async fn handle_req(
         return;
     };

+    log::info!("[SUBSCRIPTION] needs historical events");
     if needs_historical_events {
         context
             .pubsub
@@ -296,12 +307,24 @@ async fn handle_req(
     }
 }

-async fn handle_count(client: &Client, subscription_id: SubscriptionId, filters: Vec<Filter>) {
-    // context.pubsub.send(new nostr event) then handle possible errors
+async fn handle_count(
+    context: &Context,
+    client: &Client,
+    subscription_id: SubscriptionId,
+    filters: Vec<Filter>,
+) {
     let subscription = Subscription::new(subscription_id, filters);
-    let message = Message::text("COUNT not implemented");
-    send(client, message);
+    context
+        .pubsub
+        .publish(
+            channels::MSG_NOOSE,
+            crate::bussy::Message {
+                source: channels::MSG_RELAY,
+                content: crate::bussy::Command::DbReqEventCounts(client.client_id, subscription),
+            },
+        )
+        .await
 }

 async fn handle_close(client: &mut Client, subscription_id: SubscriptionId) {

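With this change COUNT is wired end to end: handle_count publishes DbReqEventCounts on MSG_NOOSE, NostrDb::counts builds the reply, and the new DbResEventCounts arm serializes it back onto the websocket. A sketch of the reply shape, assuming RelayMessage::count takes the subscription id and a usize the way nostr_db.rs uses it:

```rust
// Sketch only: the message a client receives for a COUNT request,
// serialized roughly as ["COUNT","<subscription id>",{"count":<n>}].
fn count_reply_json(subscription_id: nostr::SubscriptionId, count: usize) -> String {
    nostr::RelayMessage::count(subscription_id, count).as_json()
}
```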

@@ -1,8 +1,11 @@
+use std::path::PathBuf;
+
 use nostr::{key::FromPkStr, secp256k1::XOnlyPublicKey};

 #[derive(Clone, Debug)]
 pub struct Config {
     admin_pubkey: XOnlyPublicKey,
+    db_path: PathBuf,
 }

 impl Default for Config {
@@ -13,26 +16,30 @@
 impl Config {
     pub fn new() -> Self {
-        if let Ok(env_admin_pk) = std::env::var("ADMIN_PUBKEY") {
-            match nostr::Keys::from_pk_str(&env_admin_pk) {
-                Ok(admin_keys) => {
-                    return Self {
-                        admin_pubkey: admin_keys.public_key(),
-                    };
-                }
-                Err(e) => {
-                    panic!("Unable to parse ADMIN_PUBKEY: {}", e);
-                }
-            }
-        }
-
-        panic!("Environment variable ADMIN_PUBKEY not defined");
+        let admin_pubkey = std::env::var("ADMIN_PUBKEY")
+            .map(|env_pk| nostr::Keys::from_pk_str(&env_pk))
+            .and_then(|result| result.map_err(|err| panic!("{}", err)))
+            .unwrap()
+            .public_key();
+
+        let db_path = std::env::var("DATABASE_URL")
+            .map(PathBuf::from)
+            .unwrap();
+
+        Self {
+            admin_pubkey,
+            db_path,
+        }
     }

     pub fn get_admin_pubkey(&self) -> &XOnlyPublicKey {
         &self.admin_pubkey
     }

+    pub fn get_db_path(&self) -> PathBuf {
+        self.db_path.clone()
+    }
+
     pub fn get_relay_config_json(&self) -> serde_json::Value {
         serde_json::json!({
             "contact": "klink@zhitno.st",

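Config::new now reads both variables and unwraps them, so the relay panics at startup when ADMIN_PUBKEY or DATABASE_URL is missing or the pubkey fails to parse. A sketch of providing them from code for a local run or test; the npub is the one used in the repo's tests and the database path is only an example:

```rust
// Sketch only: both variables must be set before Config::new() runs.
fn load_config() -> crate::utils::config::Config {
    std::env::set_var(
        "ADMIN_PUBKEY",
        "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu",
    );
    std::env::set_var("DATABASE_URL", "/tmp/zhitnost.db"); // example path
    crate::utils::config::Config::new()
}
```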

@@ -1,7 +1,7 @@
 pub mod crypto;
 pub mod error;
 pub mod filter;
-// mod nostr_filter_helpers;
+mod nostr_filter_helpers;
 pub mod config;
 pub mod rejection_handler;
 pub mod response;


@@ -1,27 +1,27 @@
-use nostr::{Event, Filter, Kind, Tag};
+use nostr::{key::FromPkStr, Event, Filter, Kind};

 fn ids_match(filter: &Filter, event: &Event) -> bool {
     if filter.ids.is_empty() {
-        println!("[FILTER][IDS] skipped");
+        log::info!("[FILTER][ids_match] skipped");
         return true;
     }

-    println!(
-        "[FILTER][IDS] matched: {:?}",
-        filter.ids.iter().any(|id| id == &event.id.to_string())
+    log::info!(
+        "[FILTER][ids_match] matched: {:?}",
+        filter.ids.iter().any(|id| id == &event.id)
     );

-    filter.ids.iter().any(|id| id == &event.id.to_string())
+    filter.ids.iter().any(|id| id == &event.id)
 }

 fn kind_match(filter: &Filter, kind: Kind) -> bool {
     if filter.kinds.is_empty() {
-        println!("[FILTER][KINDS] skipped");
+        log::debug!("[FILTER][kind_match] skipped");
         return true;
     }

-    println!(
-        "[FILTER][KIND] matched: {:?}",
+    log::debug!(
+        "[FILTER][kind_match] matched: {:?}",
         filter.kinds.iter().any(|k| k == &kind)
     );
@@ -29,122 +29,142 @@ fn kind_match(filter: &Filter, kind: Kind) -> bool {
 }

 fn pubkeys_match(filter: &Filter, event: &Event) -> bool {
-    if filter.pubkeys.is_empty() {
-        println!("[FILTER][PUBKEYS] skipped");
-        return true;
-    }
-
-    println!(
-        "[FILTER][PUBKEYS] matched: {:?}",
-        filter.pubkeys.iter().any(|pk| pk == &event.pubkey)
+    log::debug!(
+        "[FILTER][pubkeys_match] matched: {:?}",
+        if let Some((p_tag, p_set)) = filter.generic_tags.get_key_value(&nostr::Alphabet::P) {
+            if p_set.is_empty() {
+                log::debug!("[FILTER][PUBKEYS] skipped");
+                return true;
+            }
+            return p_set.iter().any(|pk| match pk {
+                nostr::GenericTagValue::Pubkey(pk) => pk == &event.pubkey,
+                _ => false,
+            });
+        }
     );

-    filter.pubkeys.iter().any(|pk| pk == &event.pubkey)
+    if let Some((p_tag, p_set)) = filter.generic_tags.get_key_value(&nostr::Alphabet::P) {
+        if p_set.is_empty() {
+            log::debug!("[FILTER][PUBKEYS] skipped");
+            return true;
+        }
+        return p_set.iter().any(|pk| match pk {
+            nostr::GenericTagValue::Pubkey(pk) => pk == &event.pubkey,
+            _ => false,
+        });
+    }
+    false
 }

 fn authors_match(filter: &Filter, event: &Event) -> bool {
-    dbg!(filter);
     if filter.authors.is_empty() {
-        println!("[FILTER][AUTHORS] skipped");
+        log::debug!("[FILTER][authors_match] skipped");
         return true;
     }

-    println!(
-        "[FILTER][AUTHORS] matched: {:?}",
-        filter
-            .authors
-            .iter()
-            .any(|author| author == &event.pubkey.to_string())
+    log::debug!(
+        "[FILTER][authors_match] matched: {:?}",
+        filter.authors.iter().any(|author| author == &event.pubkey)
     );

-    filter
-        .authors
-        .iter()
-        .any(|author| author == &event.pubkey.to_string())
+    filter.authors.iter().any(|author| author == &event.pubkey)
 }

 fn delegated_authors_match(filter: &Filter, event: &Event) -> bool {
-    // Optional implementation
-
-    // let delegated_authors_match = filter.authors.iter().any(|author| {
-    //     event.tags.iter().any(|tag| match tag {
-    //         Tag::Delegation {
-    //             delegator_pk,
-    //             conditions,
-    //             sig,
-    //         } => filter
-    //             .authors
-    //             .iter()
-    //             .any(|author| author == &delegator_pk.to_string()),
-    //         _ => false,
-    //     })
-    // });
-
-    println!(
-        "[FILTER][DELEGATED_AUTHORS] matched: {:?}",
-        event.tags.iter().any(|tag| match tag {
-            Tag::Delegation {
-                delegator_pk,
-                conditions,
-                sig,
-            } => filter
-                .authors
-                .iter()
-                .any(|author| author == &delegator_pk.to_string()),
-            _ => false,
+    log::debug!(
+        "[FILTER][delegated_authors_match] matched: {:?}",
+        event.tags.iter().any(|tag| {
+            if tag.kind() == nostr::TagKind::Delegation {
+                let tag = tag.as_vec();
+                if let Ok(event_pubkey) = nostr::Keys::from_pk_str(&tag[1]) {
+                    let pk = event_pubkey.public_key();
+                    return filter.authors.iter().any(|author| author == &pk);
+                }
+                return false;
+            }
+            false
         })
     );

-    event.tags.iter().any(|tag| match tag {
-        Tag::Delegation {
-            delegator_pk,
-            conditions,
-            sig,
-        } => filter
-            .authors
-            .iter()
-            .any(|author| author == &delegator_pk.to_string()),
-        _ => true,
+    event.tags.iter().any(|tag| {
+        if tag.kind() == nostr::TagKind::Delegation {
+            let tag = tag.as_vec();
+            if let Ok(event_pubkey) = nostr::Keys::from_pk_str(&tag[1]) {
+                let pk = event_pubkey.public_key();
+                return filter.authors.iter().any(|author| author == &pk);
+            }
+            return false;
+        }
+        false
     })
 }

 fn tag_match(filter: &Filter, event: &Event) -> bool {
-    println!(
-        "[FILTER][TAG] matched: {:?}",
-        filter.generic_tags.iter().any(|(key, value)| {
-            event.tags.iter().any(|tag| {
-                let kv = tag.as_vec();
-                key.to_string() == kv[0] && value.iter().any(|vv| vv == &kv[1])
+    if filter.generic_tags.is_empty() && event.tags.is_empty() {
+        return true;
+    }
+
+    log::debug!(
+        "[FILTER][tag_match] matched: {:?}",
+        filter
+            .generic_tags
+            .iter()
+            .any(|(filter_tag_key, filter_tag_value)| {
+                event.tags.iter().any(|event_tag| {
+                    let event_tag = event_tag.as_vec();
+                    let event_tag_key = event_tag[0].clone();
+                    let event_tag_value = event_tag[1].clone();
+                    if filter_tag_key.to_string() == event_tag_key {
+                        return filter_tag_value
+                            .iter()
+                            .any(|f_tag_val| f_tag_val.to_string() == event_tag_value);
+                    };
+                    false
+                })
             })
-        })
     );

-    filter.generic_tags.iter().any(|(key, value)| {
-        event.tags.iter().any(|tag| {
-            let kv = tag.as_vec();
-            key.to_string() == kv[0] && value.iter().any(|vv| vv == &kv[1])
-        })
-    });
-
-    true // TODO: Fix delegated authors check
+    filter
+        .generic_tags
+        .iter()
+        .any(|(filter_tag_key, filter_tag_value)| {
+            event.tags.iter().any(|event_tag| {
+                let event_tag = event_tag.as_vec();
+                let event_tag_key = event_tag[0].clone();
+                let event_tag_value = event_tag[1].clone();
+                if filter_tag_key.to_string() == event_tag_key {
+                    return filter_tag_value
+                        .iter()
+                        .any(|f_tag_val| f_tag_val.to_string() == event_tag_value);
+                };
+                false
            })
        })
 }

 pub fn interested_in_event(filter: &Filter, event: &Event) -> bool {
     ids_match(filter, event)
         && filter.since.map_or(
             {
-                println!("[FILTER][SINCE][default] matched: {:?}", true);
+                log::info!("[FILTER][SINCE][default] matched: {:?}", true);
                 true
             },
             |t| {
-                println!("[FILTER][SINCE] matched: {:?}", event.created_at >= t);
+                log::info!("[FILTER][SINCE] matched: {:?}", event.created_at >= t);
                 event.created_at >= t
             },
         )
         && filter.until.map_or(
             {
-                println!("[FILTER][UNTIL][default] matched: {:?}", true);
+                log::info!("[FILTER][UNTIL][default] matched: {:?}", true);
                 true
             },
             |t| {
-                println!("[FILTER][UNTIL] matched: {:?}", event.created_at <= t);
+                log::info!("[FILTER][UNTIL] matched: {:?}", event.created_at <= t);
                 event.created_at <= t
             },
         )
@@ -154,3 +174,27 @@ pub fn interested_in_event(filter: &Filter, event: &Event) -> bool {
         || delegated_authors_match(filter, event))
         && tag_match(filter, event)
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::utils::nostr_filter_helpers::interested_in_event;
+
+    #[test]
+    fn check_simple_match() {
+        let my_keys = nostr::Keys::generate();
+        let event = nostr::EventBuilder::text_note("hello", [])
+            .to_event(&my_keys)
+            .unwrap();
+
+        let k = nostr::Kind::TextNote;
+        let filter = nostr::Filter::new()
+            .kinds(vec![k])
+            .authors(vec![event.pubkey]);
+
+        let res = interested_in_event(&filter, &event);
+        dbg!(&res);
+
+        assert!(res);
+    }
+}


@@ -1,5 +1,5 @@
 use super::{config::Config, error::Error};
-// use super::nostr_filter_helpers;
+use crate::utils::nostr_filter_helpers;
 use crate::PubSub;

 use nostr::{Event, Filter, SubscriptionId};
@@ -34,11 +34,12 @@ impl Subscription {
     pub fn interested_in_event(&self, event: &Event) -> bool {
         log::info!("[Subscription] Checking if client is interested in the new event");
+
         for filter in &self.filters {
-            if filter.match_event(event) {
+            if nostr_filter_helpers::interested_in_event(filter, event) {
                 log::info!("[Subscription] found filter that matches the event");
                 return true;
             }
         }
         false
     }
 }