diff --git a/Cargo.lock b/Cargo.lock index 3a4412b..56ae522 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -35,7 +35,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", - "getrandom", "once_cell", "version_check", ] @@ -99,15 +98,6 @@ dependencies = [ "syn 2.0.28", ] -[[package]] -name = "atoi" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" -dependencies = [ - "num-traits", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -211,9 +201,6 @@ name = "bitflags" version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" -dependencies = [ - "serde", -] [[package]] name = "blake2" @@ -319,12 +306,6 @@ dependencies = [ "inout", ] -[[package]] -name = "const-oid" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" - [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -340,21 +321,6 @@ dependencies = [ "libc", ] -[[package]] -name = "crc" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" - [[package]] name = "crc32fast" version = "1.3.2" @@ -417,14 +383,53 @@ dependencies = [ ] [[package]] -name = "der" -version = "0.7.8" +name = "deadpool" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" dependencies = [ - "const-oid", - "pem-rfc7468", - "zeroize", + "async-trait", + "deadpool-runtime", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" +dependencies = [ + "tokio", +] + +[[package]] +name = "deadpool-sqlite" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8010e36e12f3be22543a5e478b4af20aeead9a700dd69581a5e050a070fc22c" +dependencies = [ + "deadpool", + "deadpool-sync", + "rusqlite", +] + +[[package]] +name = "deadpool-sync" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8db70494c13cae4ce67b4b4dafdaf828cf0df7237ab5b9e2fcabee4965d0a0a" +dependencies = [ + "deadpool-runtime", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", ] [[package]] @@ -434,25 +439,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid", "crypto-common", "subtle", ] -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - [[package]] name = "either" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" -dependencies = [ - "serde", -] [[package]] name = "encoding_rs" @@ -491,27 +486,16 @@ 
dependencies = [ ] [[package]] -name = "etcetera" -version = "0.8.0" +name = "fallible-iterator" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" -dependencies = [ - "cfg-if", - "home", - "windows-sys", -] +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] -name = "event-listener" -version = "2.5.3" +name = "fallible-streaming-iterator" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "fastrand" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" [[package]] name = "filetime" @@ -525,12 +509,6 @@ dependencies = [ "windows-sys", ] -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "flate2" version = "1.0.28" @@ -573,17 +551,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "flume" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" -dependencies = [ - "futures-core", - "futures-sink", - "spin 0.9.8", -] - [[package]] name = "fnv" version = "1.0.7" @@ -625,34 +592,6 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" -[[package]] -name = "futures-executor" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-intrusive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot 0.12.1", -] - -[[package]] -name = "futures-io" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" - [[package]] name = "futures-macro" version = "0.3.28" @@ -683,11 +622,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ "futures-core", - "futures-io", "futures-macro", "futures-sink", "futures-task", - "memchr", "pin-project-lite", "pin-utils", "slab", @@ -811,9 +748,6 @@ name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] [[package]] name = "hermit-abi" @@ -821,36 +755,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - [[package]] name = "hex_lit" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3011d1213f159867b13cfd6ac92d2cd5f1345762c63be3554e84092d85a50bbd" -[[package]] -name = "hkdf" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" -dependencies = [ - "hmac", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - [[package]] name = "home" version = "0.5.5" @@ -1041,15 +951,6 @@ dependencies = [ "windows-sys", ] -[[package]] -name = "itertools" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.9" @@ -1076,9 +977,6 @@ name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -dependencies = [ - "spin 0.5.2", -] [[package]] name = "libc" @@ -1086,17 +984,11 @@ version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" -[[package]] -name = "libm" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - [[package]] name = "libsqlite3-sys" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" dependencies = [ "cc", "pkg-config", @@ -1125,16 +1017,6 @@ version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest", -] - [[package]] name = "memchr" version = "2.6.3" @@ -1166,12 +1048,6 @@ dependencies = [ "unicase", ] -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -1216,21 +1092,11 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e664971378a3987224f7a0e10059782035e89899ae403718ee07de85bec42afe" -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - [[package]] name = "nostr" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72adfa99eb5f1e5afa3f1fe6503c89989b7b0b61c4e5ec23c5b839967cd54da1" +checksum = "3e47228d958fd65ef3e04650a3b1dd80f16f10f0243c80ed969556dead0f48c8" dependencies = [ "aes", "base64 0.21.2", @@ -1240,6 +1106,7 @@ dependencies = [ "chacha20", "getrandom", "instant", + "js-sys", "negentropy", "once_cell", "reqwest", @@ -1247,6 +1114,22 @@ dependencies = [ "serde_json", "tracing", "url-fork", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "nostr-database" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa0550256c8d4f0aaf74891ac986bd5ba46b2957c2c7e20f51838fa5819285f8" +dependencies = [ + "async-trait", + "nostr", + "thiserror", + "tokio", + "tracing", ] [[package]] @@ -1258,44 +1141,6 @@ dependencies = [ "windows-sys", ] -[[package]] -name = "num-bigint-dig" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" -dependencies = [ - "byteorder", - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand", - "smallvec", - "zeroize", -] - -[[package]] -name = "num-integer" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" -dependencies = [ - "autocfg", - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.16" @@ -1303,7 +1148,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", - "libm", ] [[package]] @@ -1348,9 +1192,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "parking_lot" @@ -1411,21 +1255,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "paste" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" - -[[package]] -name = "pem-rfc7468" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" -dependencies = [ - "base64ct", -] - [[package]] name = "percent-encoding" version = "2.3.0" @@ -1464,33 +1293,18 @@ version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "pkcs1" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der", - "pkcs8", - "spki", -] - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - [[package]] name = "pkg-config" version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -1662,7 +1476,7 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.22.6", + "webpki-roots", "winreg", ] @@ -1682,25 +1496,31 @@ dependencies = [ ] [[package]] -name = "rsa" -version = "0.9.2" +name = "rusqlite" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +checksum = "a78046161564f5e7cd9008aff3b2990b3850dc8e0349119b98e8f251e099f24d" dependencies = [ - "byteorder", - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-iter", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core", - "signature", - "spki", - "subtle", - "zeroize", + "bitflags 2.3.3", + "chrono", + "fallible-iterator", + "fallible-streaming-iterator", + "hashlink", + "libsqlite3-sys", + "serde_json", + "smallvec", + "time", + "uuid", +] + +[[package]] +name = "rusqlite_migration" +version = 
"1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4116d1697de2912db0b72069473dfb025f6c332b4a085ed041d121e8d745aea" +dependencies = [ + "log", + "rusqlite", ] [[package]] @@ -1833,22 +1653,16 @@ dependencies = [ [[package]] name = "sea-query" -version = "0.30.4" +version = "0.30.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41558fa9bb5f4d73952dac0b9d9c2ce23966493fc9ee0008037b01d709838a68" +checksum = "4166a1e072292d46dc91f31617c2a1cdaf55a8be4b5c9f4bf2ba248e3ac4999b" dependencies = [ + "chrono", "inherent", "sea-query-derive", -] - -[[package]] -name = "sea-query-binder" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36bbb68df92e820e4d5aeb17b4acd5cc8b5d18b2c36a4dd6f4626aabfa7ab1b9" -dependencies = [ - "sea-query", - "sqlx", + "serde_json", + "time", + "uuid", ] [[package]] @@ -1864,6 +1678,16 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sea-query-rusqlite" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e74efa64c7ba4d62ed221bfd6bf8a685b1bbc37e29c51b1039405fc6c33a2dd" +dependencies = [ + "rusqlite", + "sea-query", +] + [[package]] name = "secp256k1" version = "0.27.0" @@ -1887,18 +1711,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.181" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d3e73c93c3240c0bda063c239298e633114c69a888c3e37ca8bb33f343e9890" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.181" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be02f6cb0cd3a5ec20bbcfbcbd749f57daddb1a0882dc2e46a6c236c90b977ed" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", @@ -1911,6 +1735,7 @@ 
version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" dependencies = [ + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -1948,17 +1773,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -1968,16 +1782,6 @@ dependencies = [ "libc", ] -[[package]] -name = "signature" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" -dependencies = [ - "digest", - "rand_core", -] - [[package]] name = "slab" version = "0.4.8" @@ -2017,21 +1821,24 @@ dependencies = [ "argon2", "async-trait", "chrono", + "deadpool-sqlite", "flexbuffers", "flexi_logger", "futures-util", "lazy_static", "log", "nostr", + "nostr-database", "regex", + "rusqlite", + "rusqlite_migration", "rustls 0.21.6", "sailfish", "sea-query", - "sea-query-binder", + "sea-query-rusqlite", "serde", "serde_json", "sled", - "sqlx", "thiserror", "tokio", "tokio-stream", @@ -2061,238 +1868,6 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "sqlformat" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" -dependencies = [ - "itertools", - "nom", - "unicode_categories", -] - -[[package]] -name = "sqlx" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" -dependencies = [ - "sqlx-core", - "sqlx-macros", - "sqlx-mysql", - "sqlx-postgres", - "sqlx-sqlite", -] - -[[package]] -name = "sqlx-core" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" -dependencies = [ - "ahash", - "atoi", - "byteorder", - "bytes", - "crc", - "crossbeam-queue", - "dotenvy", - "either", - "event-listener", - "futures-channel", - "futures-core", - "futures-intrusive", - "futures-io", - "futures-util", - "hashlink", - "hex", - "indexmap 2.0.0", - "log", - "memchr", - "once_cell", - "paste", - "percent-encoding", - "rustls 0.21.6", - "rustls-pemfile", - "serde", - "serde_json", - "sha2", - "smallvec", - "sqlformat", - "thiserror", - "tokio", - "tokio-stream", - "tracing", - "url", - "webpki-roots 0.24.0", -] - -[[package]] -name = "sqlx-macros" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" -dependencies = [ - "proc-macro2", - "quote", - "sqlx-core", - "sqlx-macros-core", - "syn 1.0.109", -] - -[[package]] -name = "sqlx-macros-core" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" -dependencies = [ - "dotenvy", - "either", - "heck", - "hex", - "once_cell", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2", - "sqlx-core", - "sqlx-mysql", - "sqlx-sqlite", - "syn 1.0.109", - "tempfile", - "tokio", - "url", -] - -[[package]] -name = "sqlx-mysql" -version = "0.7.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" -dependencies = [ - "atoi", - "base64 0.21.2", - "bitflags 2.3.3", - "byteorder", - "bytes", - "crc", - "digest", - "dotenvy", - "either", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "generic-array", - "hex", - "hkdf", - "hmac", - "itoa", - "log", - "md-5", - "memchr", - "once_cell", - "percent-encoding", - "rand", - "rsa", - "serde", - "sha1", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror", - "tracing", - "whoami", -] - -[[package]] -name = "sqlx-postgres" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" -dependencies = [ - "atoi", - "base64 0.21.2", - "bitflags 2.3.3", - "byteorder", - "crc", - "dotenvy", - "etcetera", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "hex", - "hkdf", - "hmac", - "home", - "itoa", - "log", - "md-5", - "memchr", - "once_cell", - "rand", - "serde", - "serde_json", - "sha1", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror", - "tracing", - "whoami", -] - -[[package]] -name = "sqlx-sqlite" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" -dependencies = [ - "atoi", - "flume", - "futures-channel", - "futures-core", - "futures-executor", - "futures-intrusive", - "futures-util", - "libsqlite3-sys", - "log", - "percent-encoding", - "serde", - "sqlx-core", - "tracing", - "url", -] - -[[package]] -name = "stringprep" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" -dependencies = [ - "finl_unicode", - "unicode-bidi", - "unicode-normalization", -] [[package]] name = 
"subtle" @@ -2322,19 +1897,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "tempfile" -version = "3.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" -dependencies = [ - "cfg-if", - "fastrand", - "redox_syscall 0.3.5", - "rustix", - "windows-sys", -] - [[package]] name = "thiserror" version = "1.0.48" @@ -2355,6 +1917,35 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "time" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" +dependencies = [ + "deranged", + "itoa", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26197e33420244aeb70c3e8c78376ca46571bc4e701e4791c2cd9f57dcb3a43f" +dependencies = [ + "time-core", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -2605,18 +2196,6 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" - -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - [[package]] name = "untrusted" version = "0.7.1" @@ -2859,21 +2438,6 @@ dependencies = [ "webpki", ] -[[package]] -name = "webpki-roots" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" -dependencies = [ - "rustls-webpki", -] - -[[package]] -name = "whoami" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" - [[package]] name = "winapi" version = "0.3.9" @@ -2988,9 +2552,3 @@ checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" dependencies = [ "winapi", ] - -[[package]] -name = "zeroize" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" diff --git a/Cargo.toml b/Cargo.toml index 2060a8c..c4f5d07 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,15 +17,23 @@ futures-util = "0.3.28" rustls = "0.21" anyhow = "1.0" sled = "0.34.7" -sqlx = { version = "0.7", features = [ "runtime-tokio", "tls-rustls", "sqlite", "migrate", "macros"] } flexi_logger = { version = "0.27.3", features = [ "async", "compress" ] } lazy_static = "1.4.0" log = "0.4" -nostr = "0.26.0" +deadpool-sqlite = "0.7.0" +rusqlite = { version = "0.30.0", features = [ "bundled", "vtab" ] } +rusqlite_migration = "1.0.2" +nostr = "0.27.0" +nostr-database = "0.27.0" regex = "1.9.5" sailfish = "0.7.0" sea-query = { version = "0.30.4", features = ["backend-sqlite", "thread-safe"] } -sea-query-binder = { version = "0.5.0", features = ["sqlx-sqlite"] } +sea-query-rusqlite = { version="0", features = [ + "with-chrono", + "with-json", + "with-uuid", + "with-time", +] } serde = "1.0" serde_json = "1.0" thiserror = "1.0.48" diff --git a/src/bussy/mod.rs b/src/bussy/mod.rs index 4ec2f0b..5fdfb72 100644 --- a/src/bussy/mod.rs +++ b/src/bussy/mod.rs @@ -1,5 +1,6 @@ use crate::{ noose::user::{User, UserRow}, + noose::sled::BanInfo, utils::{error::Error, structs::Subscription}, }; use nostr::secp256k1::XOnlyPublicKey; @@ -11,6 +12,7 @@ pub mod channels { pub static MSG_NIP05: &str = 
"MSG_NIP05"; pub static MSG_RELAY: &str = "MSG_RELAY"; pub static MSG_PIPELINE: &str = "MSG_PIPELINE"; + pub static MSG_SLED: &str = "MSG_SLED"; } #[derive(Debug, Clone, PartialEq)] @@ -19,6 +21,7 @@ pub enum Command { DbReqWriteEvent(/* client_id */ uuid::Uuid, Box), DbReqFindEvent(/* client_id*/ uuid::Uuid, Subscription), DbReqDeleteEvents(/* client_id*/ uuid::Uuid, Box), + DbReqEventCounts(/* client_id*/ uuid::Uuid, Subscription), // Old messages DbReqInsertUser(UserRow), @@ -27,15 +30,16 @@ pub enum Command { DbReqGetAccount(String), DbReqClear, // DbResponse - DbResRelayMessage( + DbResRelayMessages( /* client_id*/ uuid::Uuid, - /* Vec */ Vec, + /* Vec */ Vec, ), DbResInfo, DbResOk, DbResOkWithStatus(/* client_id */ uuid::Uuid, nostr::RelayMessage), DbResAccount, // TODO: Add Account DTO as a param DbResUser(UserRow), + DbResEventCounts(/* client_id */ uuid::Uuid, nostr::RelayMessage), // Event Pipeline PipelineReqEvent(/* client_id */ uuid::Uuid, Box), PipelineResRelayMessageOk(/* client_id */ uuid::Uuid, nostr::RelayMessage), @@ -43,6 +47,14 @@ pub enum Command { PipelineResOk, // Subscription Errors ClientSubscriptionError(/* error message */ String), + // Sled + SledReqBanUser(Box), + SledReqBanInfo(/* pubkey */ String), + SledReqUnbanUser(/* pubkey */ String), + SledReqGetBans, + SledResBan(Option), + SledResBans(Vec), + SledResSuccess(bool), // Other Str(String), ServiceError(Error), diff --git a/src/noose/db.rs b/src/noose/db.rs index a14abd6..cb56a8a 100644 --- a/src/noose/db.rs +++ b/src/noose/db.rs @@ -9,11 +9,9 @@ use std::sync::Arc; pub trait Noose: Send + Sync { async fn start(&mut self, pubsub: Arc) -> Result<(), Error>; - async fn migration_up(&self); - async fn write_event(&self, event: Box) -> Result; - async fn delete_events(&self, event_ids: Box) -> Result; + async fn find_event(&self, subscription: Subscription) -> Result, Error>; - async fn find_event(&self, subscription: Subscription) -> Result, Error>; + async fn counts(&self, 
subscription: Subscription) -> Result; } diff --git a/src/noose/migrations/1697409647688_create_events.sql b/src/noose/migrations/1697409647688_create_events.sql index ea9ca33..9c663b0 100644 --- a/src/noose/migrations/1697409647688_create_events.sql +++ b/src/noose/migrations/1697409647688_create_events.sql @@ -21,3 +21,10 @@ CREATE INDEX idx_tags_tag ON tags (tag); CREATE INDEX idx_tags_value ON tags (value); CREATE INDEX idx_tags_event_id ON tags (event_id); + +CREATE TABLE deleted_coordinates ( + coordinate TEXT NOT NULL, + created_at INTEGER NOT NULL +); + +CREATE INDEX idx_coordinates_coordinate ON deleted_coordinates (coordinate); diff --git a/src/noose/migrations/1697410424624_pragma.sql b/src/noose/migrations/1697410424624_pragma.sql index 610bd0b..c0d1526 100644 --- a/src/noose/migrations/1697410424624_pragma.sql +++ b/src/noose/migrations/1697410424624_pragma.sql @@ -1,2 +1,6 @@ +PRAGMA encoding = "UTF-8"; +PRAGMA journal_mode=WAL; PRAGMA foreign_keys = ON; PRAGMA auto_vacuum = FULL; +PRAGMA journal_size_limit=32768; +PRAGMA mmap_size = 17179869184; -- cap mmap at 16GB diff --git a/src/noose/migrations/1706115586021_event_seen_by_relays.sql b/src/noose/migrations/1706115586021_event_seen_by_relays.sql new file mode 100644 index 0000000..d3ffdfd --- /dev/null +++ b/src/noose/migrations/1706115586021_event_seen_by_relays.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS event_seen_by_relays ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + event_id TEXT NOT NULL, + relay_url TEXT NOT NULL +); + +CREATE UNIQUE INDEX IF NOT EXISTS event_seen_by_relays_index ON event_seen_by_relays(event_id,relay_url); diff --git a/src/noose/migrations/mod.rs b/src/noose/migrations/mod.rs new file mode 100644 index 0000000..b9c7fdd --- /dev/null +++ b/src/noose/migrations/mod.rs @@ -0,0 +1,50 @@ +use rusqlite::Connection; +use rusqlite_migration::{Migrations, M}; + +pub struct MigrationRunner {} + +impl MigrationRunner { + pub fn up(connection: &mut Connection) -> bool { 
+ let m_create_events = include_str!("./1697409647688_create_events.sql"); + let m_event_seen_by_relays = include_str!("1706115586021_event_seen_by_relays.sql"); + let m_add_realys = include_str!("./1697410161900_add_relays.sql"); + let m_events_fts = include_str!("./1697410223576_events_fts.sql"); + let m_users = include_str!("./1697410294265_users.sql"); + let m_unattached_media = include_str!("./1697410480767_unattached_media.sql"); + let m_pragma = include_str!("./1697410424624_pragma.sql"); + + let migrations = Migrations::new(vec![ + M::up(m_create_events), + M::up(m_event_seen_by_relays), + M::up(m_add_realys), + M::up(m_events_fts), + M::up(m_users), + M::up(m_unattached_media), + M::up(m_pragma), + ]); + + match migrations.to_latest(connection) { + Ok(()) => true, + Err(err) => { + log::error!("Migrations failed: {}", err.to_string()); + false + } + } + } +} + +#[cfg(test)] +mod tests { + use rusqlite::Connection; + + use super::MigrationRunner; + + #[test] + fn get_sql_path() { + let mut connection = Connection::open_in_memory().unwrap(); + + let runner = MigrationRunner::up(&mut connection); + + assert!(runner); + } +} diff --git a/src/noose/mod.rs b/src/noose/mod.rs index 6b7df86..c97c993 100644 --- a/src/noose/mod.rs +++ b/src/noose/mod.rs @@ -1,14 +1,13 @@ use crate::utils::structs::Context; -use tokio::runtime; - use db::Noose; use pipeline::Pipeline; - +use tokio::runtime; pub mod db; pub mod pipeline; -// mod sled; +pub mod sled; mod sqlite; pub mod user; +mod migrations; pub fn start(context: Context) { let rt = runtime::Runtime::new().unwrap(); @@ -16,19 +15,27 @@ pub fn start(context: Context) { rt.block_on(async move { let pipeline_pubsub = context.pubsub.clone(); let pipeline_config = context.config.clone(); + + let db_config = context.config.clone(); let db_pubsub = context.pubsub.clone(); + let sled_pubsub = context.pubsub.clone(); let pipeline_handle = tokio::task::spawn(async move { let mut pipeline = Pipeline::new(pipeline_pubsub, 
pipeline_config); pipeline.start().await.unwrap(); }); - let sqlite_writer_handle = tokio::task::spawn(async move { - let mut db_writer = sqlite::SqliteDb::new().await; + let sled_handle = tokio::task::spawn(async move { + let mut sled_writer = sled::SledDb::new(); + sled_writer.start(sled_pubsub).await.unwrap(); + }); + + let nostr_sqlite_writer_handle = tokio::task::spawn(async move { + let mut db_writer = sqlite::NostrSqlite::new(db_config).await; db_writer.start(db_pubsub).await.unwrap(); }); - sqlite_writer_handle.await.unwrap(); + nostr_sqlite_writer_handle.await.unwrap(); pipeline_handle.await.unwrap(); }); } diff --git a/src/noose/sled.rs b/src/noose/sled.rs index d48785f..d6b9401 100644 --- a/src/noose/sled.rs +++ b/src/noose/sled.rs @@ -1,234 +1,138 @@ -use super::db::Noose; +use std::sync::Arc; use crate::bussy::{channels, Command, Message, PubSub}; use crate::utils::error::Error; -use crate::utils::structs::Subscription; -use async_trait::async_trait; -use nostr::Event; -use serde::Serialize; -use std::sync::Arc; -use super::user::{User, UserRow}; +#[derive(Debug, Clone, PartialEq)] +pub struct BanInfo { + pub pubkey: String, + pub reason: String, +} // Db Interface pub struct SledDb { db: sled::Db, - events: sled::Tree, - nip05s: sled::Tree, - pub users: sled::Tree, - - index: sled::Db, + banned_pubkeys: sled::Tree, } impl SledDb { pub fn new() -> Self { let db = sled::open("/tmp/sled_db").unwrap(); - let events = db.open_tree("events").unwrap(); - let nip05s = db.open_tree("identifiers").unwrap(); - let accounts = db.open_tree("accounts").unwrap(); - - let index = sled::open("/tmp/sled_index").unwrap(); - + let banned_pubkeys = db.open_tree("banned_pubkeys").unwrap(); Self { db, - events, - nip05s, - users: accounts, - index, + banned_pubkeys } } + pub async fn start(&mut self, pubsub: Arc) -> Result<(), Error> { + let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await; + + while let Ok(message) = subscriber.recv().await { + 
log::info!("[Noose] received message: {:?}", message); + let command = match message.content { + Command::SledReqBanUser(ban_info) => match self.ban_user(ban_info).await { + Ok(status) => Command::SledResSuccess(status), + Err(e) => Command::ServiceError(e), + }, + Command::SledReqUnbanUser(pubkey) => match self.unban_user(&pubkey).await { + Ok(status) => Command::SledResSuccess(status), + Err(e) => Command::ServiceError(e), + }, + Command::SledReqGetBans => match self.get_bans().await { + Ok(bans) => Command::SledResBans(bans), + Err(e) => Command::ServiceError(e), + }, + Command::SledReqBanInfo(pubkey) => match self.get_ban_by_pubkey(&pubkey).await { + Ok(ban_info) => Command::SledResBan(ban_info), + Err(e) => Command::ServiceError(e), + }, + _ => Command::Noop, + }; + if command != Command::Noop { + let channel = message.source; + let message = Message { + source: channels::MSG_SLED, + content: command, + }; + + log::info!( + "[Sled] publishing new message: {:?} to channel {}", + message, + channel + ); + + pubsub.publish(channel, message).await; + } + } + + Ok(()) + } + fn clear_db(&self) -> Result<(), sled::Error> { self.db.clear() } - fn clear_index(&self) -> Result<(), sled::Error> { - self.index.clear() - } - - async fn insert_user(&self, user: UserRow) -> Result<(), Error> { - let pubkey = user.pubkey.clone(); - let username = user.username.clone(); - - if let Ok(Some(_)) = self.nip05s.get(&username) { - return Err(Error::internal_with_message("User already exists")); + async fn ban_user(&self, ban_info: Box) -> Result { + if let Ok(Some(_)) = self.banned_pubkeys.insert(ban_info.pubkey, ban_info.reason.as_bytes()) { + return Ok(true) } - let mut user_buff = flexbuffers::FlexbufferSerializer::new(); - user.serialize(&mut user_buff).unwrap(); - - self.nip05s.insert(&username, user_buff.view()).unwrap(); - - let prefix = "nip05:"; - let key = format!("{}{}", prefix, pubkey); - self.index.insert(key, username.as_bytes()).unwrap(); - - Ok(()) + Ok(false) } - 
async fn get_user(&self, user: User) -> Result { - let mut user_row = None; - if let Some(username) = user.name { - if let Ok(Some(buff)) = self.nip05s.get(username) { - let b = flexbuffers::from_slice::(&buff).unwrap(); - user_row = Some(b); + fn is_banned(&self, pubkey: &String) -> bool{ + if let Ok(Some(banned)) = self.banned_pubkeys.get(pubkey) { + return true + } + false + } + + async fn unban_user(&self, pubkey: &String) -> Result { + if self.is_banned(pubkey) { + self.banned_pubkeys.remove(pubkey).unwrap(); + + return Ok(true); + } + + Ok(false) + } + + async fn get_bans(&self) -> Result, Error> { + let bans: Vec = self.banned_pubkeys.iter().filter_map(|row| { + if let Ok((k, v)) = row { + let ban_info = BanInfo { + pubkey: String::from_utf8(k.to_vec()).unwrap(), + reason: String::from_utf8(v.to_vec()).unwrap(), + }; + + Some(ban_info) + } else { + None } - } else if let Some(pubkey) = user.pubkey { - let prefix = "nip05:"; - let reference = format!("{}{}", prefix, pubkey); - if let Ok(Some(row)) = self.index.get(reference) { - let key = String::from_utf8(row.to_vec()).unwrap(); + }).collect(); - if let Ok(Some(buff)) = self.nip05s.get(key) { - let b = flexbuffers::from_slice::(&buff).unwrap(); + Ok(bans) + } - user_row = Some(b); - } + async fn get_ban_by_pubkey(&self, pubkey: &String) -> Result, Error> { + if self.is_banned(pubkey) { + if let Ok(Some(reason)) = self.banned_pubkeys.get(pubkey) { + let ban_info = BanInfo { + pubkey: pubkey.to_owned(), + reason: String::from_utf8(reason.to_vec()).unwrap() + }; + + return Ok(Some(ban_info)); } - } - match user_row { - Some(user) => Ok(user), - None => Err(Error::internal_with_message("User not found")), - } - } -} -#[async_trait] -impl Noose for SledDb { - async fn start(&mut self, pubsub: Arc) -> Result<(), Error> { - let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await; - - while let Ok(message) = subscriber.recv().await { - log::info!("noose subscriber received: {:?}", message); - let command = 
match message.content { - Command::DbReqInsertUser(user) => match self.insert_user(user).await { - Ok(_) => Command::DbResOk, - Err(e) => Command::ServiceError(e), - }, - Command::DbReqGetUser(user) => match self.get_user(user).await { - Ok(user) => Command::DbResUser(user), - Err(e) => Command::ServiceError(e), - }, - Command::DbReqWriteEvent(event) => match self.write_event(event).await { - Ok(_) => Command::DbResOk, - Err(e) => Command::ServiceError(e), - }, - _ => Command::Noop, - }; - if command != Command::Noop { - log::info!("Publishing new message"); - let channel = message.source; - - pubsub - .publish( - channel, - Message { - source: channels::MSG_NOOSE, - content: command, - }, - ) - .await; - } + return Ok(None); } - Ok(()) - } - - async fn migration_up(&self) {} - - async fn write_event(&self, event: Box) -> Result { - let mut event_buff = flexbuffers::FlexbufferSerializer::new(); - event.serialize(&mut event_buff).unwrap(); - - self.events.insert(event.id, event_buff.view()).unwrap(); - { - // Timestamp - let key = format!("created_at:{}|#e:{}", event.created_at, event.id); - self.index.insert(key, event.id.as_bytes()).unwrap(); - } - - { - // Author, pubkeys #p - let key = format!("#author:{}|#e:{}", event.pubkey, event.id); - self.index.insert(key, event.id.as_bytes()).unwrap(); - // self.index.scan_prefix( - } - - { - // Kinds - let key = format!("#k:{}|#e:{}", event.kind, event.id); - self.index.insert(key, event.id.as_bytes()).unwrap(); - // self.index.scan_prefix( - } - - { - // Tags - event.tags.iter().for_each(|tag| { - if let Some(key) = match tag { - // #e tag - nostr::Tag::Event(event_id, _, _) => Some(format!("#e:{}", event_id)), - // #p tag - nostr::Tag::PubKey(pubkey, _) => Some(format!("#p:{}|#e:{}", pubkey, event.id)), - // #t tag - nostr::Tag::Hashtag(hashtag) => Some(format!("#t:{}|#e:{}", hashtag, event.id)), - // #a tag - nostr::Tag::A { - kind, - public_key, - identifier, - relay_url, - } => Some(format!( - 
"#a:kind:{}|#a:pubkey:{}#a:identifier:{}|#e:{}", - kind, public_key, identifier, event.id - )), - _ => None, - } { - self.index.insert(key, event.id.as_bytes()).unwrap(); - } - }); - - // let key = format!("#t:{}|#e:{}", event.kind, event.id); - // self.index.insert(key, event.id.as_bytes()).unwrap(); - // self.index.scan_prefix( - } - - let message = format!("[\"OK\", \"{}\", true, \"\"]", event.id.to_string()); - Ok(message) - } - - async fn find_event(&self, subscription: Subscription) -> Result, Error> { - todo!() + Ok(None) } } #[cfg(test)] mod tests { - use super::SledDb; - use crate::{ - bussy::PubSub, - noose::user::{User, UserRow}, - }; - use std::sync::Arc; - #[tokio::test] - async fn get_db_names() { - let pubsub = Arc::new(PubSub::new()); - - let db = SledDb::new(); - - let pk = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string(); - let username = "klink".to_string(); - let user = UserRow::new(pk, username, false); - let result = db.insert_user(user).await; - - let pubkey = "npub1p3ya99jfdafnqlk87p6wfd36d2nme5mkld769rhd9pkht6hmqlaq6mzxdu".to_string(); - let username = "klink".to_string(); - let user = User { - name: None, - pubkey: Some(pubkey), - }; - let user = db.get_user(user).await; - - db.clear_db().unwrap(); - db.clear_index().unwrap(); - } } diff --git a/src/noose/sqlite.rs b/src/noose/sqlite.rs index 9540bfb..43f2b77 100644 --- a/src/noose/sqlite.rs +++ b/src/noose/sqlite.rs @@ -1,14 +1,76 @@ -use nostr::{Event, JsonUtil, RelayMessage}; -use sea_query::{extension::sqlite::SqliteExpr, Query}; -use sea_query_binder::SqlxBinder; -use sqlx::sqlite::{Sqlite, SqlitePoolOptions}; -use sqlx::FromRow; -use sqlx::{migrate::MigrateDatabase, Pool}; +use super::{db::Noose, migrations::MigrationRunner}; +use crate::{ + bussy::{channels, Command, Message, PubSub}, + utils::{config::Config as ServiceConfig, error::Error, structs::Subscription}, +}; +use async_trait::async_trait; +use deadpool_sqlite::{Config, Object, Pool, Runtime}; 
+use nostr::{nips::nip01::Coordinate, Event, EventId, Filter, RelayMessage, Timestamp, Url}; +use nostr_database::{Backend, DatabaseOptions, NostrDatabase, Order}; +use rusqlite::Row; +use sea_query::{extension::sqlite::SqliteExpr, Order as SqOrder, Query, SqliteQueryBuilder}; +use sea_query_rusqlite::RusqliteBinder; use std::sync::Arc; +use std::{collections::HashSet, str::FromStr}; -use super::db::Noose; -use crate::bussy::{channels, Command, Message, PubSub}; -use crate::utils::{error::Error, structs::Subscription}; +#[derive(Debug, Clone)] +struct EventRow { + id: String, + pubkey: String, + created_at: i64, + kind: i64, + tags: String, + sig: String, + content: String, +} + +impl From<&Row<'_>> for EventRow { + fn from(row: &Row) -> Self { + let id: String = row.get("id").unwrap(); + let pubkey: String = row.get("pubkey").unwrap(); + let created_at: i64 = row.get("created_at").unwrap(); + let kind: i64 = row.get("kind").unwrap(); + let tags: String = row.get("tags").unwrap(); + let sig: String = row.get("sig").unwrap(); + let content: String = row.get("content").unwrap(); + + Self { + id, + pubkey, + created_at, + kind, + tags, + sig, + content, + } + } +} + +impl From<&EventRow> for Event { + fn from(row: &EventRow) -> Self { + row.to_event() + } +} + +impl EventRow { + pub fn to_event(&self) -> Event { + let tags: Vec> = serde_json::from_str(&self.tags).unwrap(); + + let event_json = serde_json::json!( + { + "id": self.id, + "content": self.content, + "created_at": self.created_at, + "kind": self.kind, + "pubkey": self.pubkey, + "sig": self.sig, + "tags": tags + } + ); + + Event::from_value(event_json).unwrap() + } +} enum EventsTable { Table, @@ -85,455 +147,680 @@ impl sea_query::Iden for TagsTable { } } -#[derive(FromRow, Debug)] -struct EventsCountRow(i32); - -#[derive(FromRow, Debug)] -struct EventRow { - id: String, - pubkey: String, - created_at: i64, - kind: i64, - tags: String, - sig: String, - content: String, +enum DeletedCoordinatesTable { + 
Table, + Coordinate, + CreatedAt, } -impl EventRow { - pub fn to_string(&self, subscription_id: nostr::SubscriptionId) -> String { - let tags: Vec> = serde_json::from_str(&self.tags).unwrap(); - - let message = serde_json::json!([ - "EVENT", - subscription_id, - { - "id": self.id, - "content": self.content, - "created_at": self.created_at, - "kind": self.kind, - "pubkey": self.pubkey, - "sig": self.sig, - "tags": tags +impl sea_query::Iden for DeletedCoordinatesTable { + fn unquoted(&self, s: &mut dyn std::fmt::Write) { + write!( + s, + "{}", + match self { + Self::Table => "deleted_coordinates", + Self::Coordinate => "coordinate", + Self::CreatedAt => "created_at", } - ]); - - message.to_string() + ) + .unwrap() } } -pub struct SqliteDb { - pool: Pool, +enum EventSeenByRelaysTable { + Table, + Id, + EventId, + RelayURL, } -impl SqliteDb { - pub async fn new() -> Self { - let pool = SqliteDb::build_pool("noose_pool", 42).await; - - Self { pool } +impl sea_query::Iden for EventSeenByRelaysTable { + fn unquoted(&self, s: &mut dyn std::fmt::Write) { + write!( + s, + "{}", + match self { + Self::Table => "event_seen_by_relays", + Self::Id => "id", + Self::EventId => "event_id", + Self::RelayURL => "relay_url", + } + ) + .unwrap() } +} - pub fn info(&self) { - dbg!(self.pool.options()); - } - - async fn migrate(pool: &Pool) { - sqlx::migrate!("src/noose/migrations") - .run(pool) - .await - .unwrap() - } - - async fn build_pool(name: &str, max_size: u32) -> Pool { - let pool_options = SqlitePoolOptions::new() - .test_before_acquire(true) - .idle_timeout(Some(std::time::Duration::from_secs(10))) - .max_lifetime(Some(std::time::Duration::from_secs(30))) - .max_lifetime(None) - .idle_timeout(None) - .max_connections(max_size); +#[derive(Debug)] +pub struct NostrSqlite { + pool: Pool, +} +impl NostrSqlite { + pub async fn new(config: Arc) -> Self { let env_db_path = std::env::var("DATABASE_URL").unwrap_or("/tmp/sqlite.db".to_string()); - if 
!Sqlite::database_exists(&env_db_path).await.unwrap_or(false) { - log::info!("Creating database {}", &env_db_path); - match Sqlite::create_database(&env_db_path).await { - Ok(_) => log::info!("Db {} created", &env_db_path), - Err(_) => panic!("Failed to create database {}", &env_db_path), - } - } - if let Ok(pool) = pool_options.connect(&env_db_path).await { - log::info!("Connected to sqlite pool {}", name); - SqliteDb::migrate(&pool).await; + let cfg = Config::new(env_db_path); + let pool = cfg.create_pool(Runtime::Tokio1).unwrap(); - pool - } else { - panic!("Connection to sqlite pool {} failed", name); + if NostrSqlite::run_migrations(&pool).await { + return Self { pool }; + } + + panic!("Pool's closed"); + } + + async fn run_migrations(pool: &Pool) -> bool { + let connection = pool.get().await.unwrap(); + connection.interact(MigrationRunner::up).await.unwrap() + } + + async fn get_connection(&self) -> Result { + match self.pool.get().await { + Ok(connection) => Ok(connection), + Err(err) => Err(Error::internal(err.into())), } } - async fn add_event(&self, event: Box) -> Result { + async fn save_event(&self, event: &Event) -> Result { + let event = event.clone(); + let id = event.id.to_string(); - let kind = event.kind.to_string(); + let kind = event.kind.as_u64(); let pubkey = event.pubkey.to_string(); let content = event.content.to_string(); let created_at = event.created_at.as_i64(); let tags = serde_json::to_string(&event.tags).unwrap(); let sig = event.sig.to_string(); - let message = nostr::RelayMessage::Ok { - event_id: event.id, - status: true, - message: "".to_string(), - }; + let ids: Vec = event.event_ids().map(|eid| eid.to_string()).collect(); - // Skip events that are older than 10 minutes - if chrono::Utc::now().timestamp() - 600 > created_at { - let message = nostr::RelayMessage::Ok { - event_id: event.id, - status: false, - message: "invalid: event creation date is too far off from the current time" - .to_string(), - }; - return Ok(message); + if 
self.event_is_too_old(&event) { + return Ok(false); } if event.is_ephemeral() { - return Ok(message); + return Ok(false); } - { - let (sql, value) = Query::select() - .from(EventsTable::Table) - .column(EventsTable::EventId) - .and_where(sea_query::Expr::col(EventsTable::EventId).eq(event.id.to_string())) - .limit(1) - .build_sqlx(sea_query::SqliteQueryBuilder); + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; - let events = sqlx::query_with(&sql, value).fetch_one(&self.pool).await; - if events.ok().is_some() { - let message = nostr::RelayMessage::Ok { - event_id: event.id, - status: false, - message: "invalid: event with this id already exists".to_string(), - }; - return Ok(message); - } - } + let Ok(event_saved) = connection + .interact(move |conn| -> Result { + let tx = conn.transaction().unwrap(); - let tx = self.pool.begin().await.unwrap(); - { - if event.is_replaceable() { - dbg!("new event is replaceable - searching for previously stored event"); - let (sql, values) = Query::select() - .from(EventsTable::Table) - .columns([EventsTable::EventId]) - .and_where( - sea_query::Expr::col(EventsTable::Pubkey).eq(event.pubkey.to_string()), - ) - .and_where(sea_query::Expr::col(EventsTable::Kind).eq(event.kind.as_u32())) - .and_where( - sea_query::Expr::col(EventsTable::CreatedAt).gte(event.created_at.as_i64()), - ) - .limit(1) - .build_sqlx(sea_query::SqliteQueryBuilder); + if event.is_replaceable() { + dbg!("new event is replaceable - searching for previously stored event"); + let (sql, values) = Query::select() + .from(EventsTable::Table) + .columns([EventsTable::EventId]) + .and_where(sea_query::Expr::col(EventsTable::Pubkey).eq(&pubkey)) + .and_where(sea_query::Expr::col(EventsTable::Kind).eq(kind)) + .and_where(sea_query::Expr::col(EventsTable::CreatedAt).gte(created_at)) + .limit(1) + .build_rusqlite(SqliteQueryBuilder); - let repl_count = sqlx::query_with(&sql, 
values).fetch_one(&self.pool).await; - - if repl_count.ok().is_some() { - return Ok(message); + if let Ok(res) = tx.execute(sql.as_str(), &*values.as_params()) { + if res > 0 { + return Ok(true); + } + }; + } - } - } - { - if event.is_parameterized_replaceable() { - dbg!( + if event.is_parameterized_replaceable() { + dbg!( "new event is parametrized replaceable - searching for previously stored event" ); - let d_tags: Vec = event - .tags - .iter() - .filter(|tag| tag.kind() == nostr::TagKind::D) - .map(|tag| tag.clone().to_vec()[1].clone()) - .collect(); - let (sql, values) = Query::select() - .from(EventsTable::Table) - .column((EventsTable::Table, EventsTable::EventId)) - .left_join( - TagsTable::Table, - sea_query::Expr::col((TagsTable::Table, TagsTable::EventId)) - .equals((EventsTable::Table, EventsTable::EventId)), - ) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey)) - .eq(event.pubkey.to_string()), - ) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::Kind)) - .eq(event.kind.as_u32()), - ) - .and_where(sea_query::Expr::col((TagsTable::Table, TagsTable::Tag)).eq("d")) - .and_where( - sea_query::Expr::col((TagsTable::Table, TagsTable::Value)) - .eq(d_tags[0].to_string()), - ) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::CreatedAt)) - .gte(event.created_at.as_i64()), - ) - .limit(1) - .build_sqlx(sea_query::SqliteQueryBuilder); + let d_tags: Vec = event + .tags + .iter() + .filter(|tag| tag.kind() == nostr::TagKind::D) + .map(|tag| tag.clone().to_vec()[1].clone()) + .collect(); + let (sql, values) = Query::select() + .from(EventsTable::Table) + .column((EventsTable::Table, EventsTable::EventId)) + .left_join( + TagsTable::Table, + sea_query::Expr::col((TagsTable::Table, TagsTable::EventId)) + .equals((EventsTable::Table, EventsTable::EventId)), + ) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey)) + .eq(&pubkey), + ) + .and_where( + 
sea_query::Expr::col((EventsTable::Table, EventsTable::Kind)).eq(kind), + ) + .and_where(sea_query::Expr::col((TagsTable::Table, TagsTable::Tag)).eq("d")) + .and_where( + sea_query::Expr::col((TagsTable::Table, TagsTable::Value)) + .eq(d_tags[0].to_string()), + ) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::CreatedAt)) + .gte(created_at), + ) + .limit(1) + .build_rusqlite(SqliteQueryBuilder); - let repl_count = sqlx::query_with(&sql, values).fetch_one(&self.pool).await; - - if repl_count.ok().is_some() { - return Ok(message); - } - } - } - - // Insert replaceble event - { - if event.is_replaceable() { - dbg!("deleting older replaceable event from events table"); - let (sql, values) = Query::delete() - .from_table(EventsTable::Table) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::Kind)) - .eq(event.kind.as_u32()), - ) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey)) - .eq(event.pubkey.to_string()), - ) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::EventId)) - .not_in_subquery( - Query::select() - .from(EventsTable::Table) - .column(EventsTable::EventId) - .and_where( - sea_query::Expr::col(EventsTable::Kind) - .eq(event.kind.as_u32()), - ) - .and_where( - sea_query::Expr::col(EventsTable::Pubkey) - .eq(event.pubkey.to_string()), - ) - .order_by(EventsTable::CreatedAt, sea_query::Order::Desc) - .limit(1) - .to_owned(), - ), - ) - .build_sqlx(sea_query::SqliteQueryBuilder); - - let results = sqlx::query_with(&sql, values) - .execute(&self.pool) - .await - .unwrap(); - - if results.rows_affected() > 0 { - log::info!( - "removed {} older replaceable kind {} events for author: {:?}", - results.rows_affected(), - event.kind.as_u32(), - event.pubkey.to_string() - ); - } - } - } - - // Insert parametrized replaceble event - { - if event.is_parameterized_replaceable() { - dbg!("deleting older parametrized replaceable event from events table"); - log::info!("deleting 
older parametrized replaceable event from events table"); - let d_tag = event.identifier(); - let (sql, values) = Query::delete() - .from_table(EventsTable::Table) - .and_where( - sea_query::Expr::col((EventsTable::Table, EventsTable::EventId)) - .in_subquery( - Query::select() - .from(EventsTable::Table) - .column((EventsTable::Table, EventsTable::EventId)) - .left_join( - TagsTable::Table, - sea_query::Expr::col(( - TagsTable::Table, - TagsTable::EventId, - )) - .equals((EventsTable::Table, EventsTable::EventId)), - ) - .and_where( - sea_query::Expr::col(( - EventsTable::Table, - EventsTable::Kind, - )) - .eq(event.kind.as_u32()), - ) - .and_where( - sea_query::Expr::col(( - EventsTable::Table, - EventsTable::Pubkey, - )) - .eq(event.pubkey.to_string()), - ) - .and_where( - sea_query::Expr::col((TagsTable::Table, TagsTable::Tag)) - .eq("d"), - ) - .and_where( - sea_query::Expr::col((TagsTable::Table, TagsTable::Value)) - .eq(d_tag), - ) - .to_owned(), - ), - ) - .build_sqlx(sea_query::SqliteQueryBuilder); - - let results = sqlx::query_with(&sql, values) - .execute(&self.pool) - .await - .unwrap(); - - if results.rows_affected() > 0 { - log::info!("removed {} older parameterized replaceable kind {} events for author: {:?}", results.rows_affected(), event.kind, event.pubkey); - } - } - } - - // Process deletion events - dbg!(event.as_json()); - if event.kind.as_u32() == 5 { - dbg!("deleting event"); - log::info!("deleting event"); - let ids: Vec = event.event_ids().map(|eid| eid.to_string()).collect(); - let (sql, values) = Query::delete() - .from_table(EventsTable::Table) - .and_where(sea_query::Expr::col(EventsTable::Kind).ne(5)) - .and_where(sea_query::Expr::col(EventsTable::Pubkey).eq(event.pubkey.to_string())) - .and_where(sea_query::Expr::col(EventsTable::EventId).is_in(&ids)) - .build_sqlx(sea_query::SqliteQueryBuilder); - - let results = sqlx::query_with(&sql, values) - .execute(&self.pool) - .await - .unwrap(); - - if results.rows_affected() > 0 { - 
log::info!( - "removed {} events for author {:?}", - results.rows_affected(), - event.pubkey - ); - } - - // Delete from EventsFTS - let (sql, values) = Query::delete() - .from_table(EventsFTSTable::Table) - .and_where(sea_query::Expr::col(EventsFTSTable::EventId).is_in(&ids)) - .build_sqlx(sea_query::SqliteQueryBuilder); - let _ = sqlx::query_with(&sql, values) - .execute(&self.pool) - .await - .unwrap(); - } else { - dbg!("inserting new event in events"); - log::info!("inserting new event in events"); - // Insert into Events table - let (sql, values) = Query::insert() - .into_table(EventsTable::Table) - .columns([ - EventsTable::EventId, - EventsTable::Content, - EventsTable::Kind, - EventsTable::Pubkey, - EventsTable::CreatedAt, - EventsTable::Tags, - EventsTable::Sig, - ]) - .values_panic([ - id.clone().into(), - content.clone().into(), - kind.into(), - pubkey.into(), - created_at.into(), - tags.into(), - sig.into(), - ]) - .build_sqlx(sea_query::SqliteQueryBuilder); - - if let Err(e) = sqlx::query_with(&sql, values).execute(&self.pool).await { - log::error!("Error inserting event into 'events' table: {}", e); - } - - // Insert into EventsFTS table - dbg!("inserting new event into eventsFTS"); - let (sql, values) = Query::insert() - .into_table(EventsFTSTable::Table) - .columns([EventsFTSTable::EventId, EventsFTSTable::Content]) - .values_panic([id.clone().into(), content.into()]) - .build_sqlx(sea_query::SqliteQueryBuilder); - - if let Err(e) = sqlx::query_with(&sql, values).execute(&self.pool).await { - log::error!("Error inserting event into 'eventsFTS' table: {}", e); - } - - // Insert into Tags table - dbg!("inserting new event into tags"); - for tag in event.tags.clone() { - let tag = tag.to_vec(); - if tag.len() >= 2 { - let tag_name = &tag[0]; - let tag_value = &tag[1]; - if tag_name.len() == 1 { - let (sql, values) = Query::insert() - .into_table(TagsTable::Table) - .columns([TagsTable::Tag, TagsTable::Value, TagsTable::EventId]) - 
.values_panic([tag_name.into(), tag_value.into(), id.clone().into()]) - .build_sqlx(sea_query::SqliteQueryBuilder); - - if let Err(e) = sqlx::query_with(&sql, values).execute(&self.pool).await { - log::error!("Error inserting event into 'tags' table: {}", e); + if let Ok(results) = tx.execute(sql.as_str(), &*values.as_params()) { + if results > 0 { + return Ok(true); } } } + + // Insert replaceble event + if event.is_replaceable() { + dbg!("deleting older replaceable event from events table"); + let (sql, values) = Query::delete() + .from_table(EventsTable::Table) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::Kind)).eq(kind), + ) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::Pubkey)) + .eq(&pubkey), + ) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::EventId)) + .not_in_subquery( + Query::select() + .from(EventsTable::Table) + .column(EventsTable::EventId) + .and_where(sea_query::Expr::col(EventsTable::Kind).eq(kind)) + .and_where( + sea_query::Expr::col(EventsTable::Pubkey).eq(&pubkey), + ) + .order_by(EventsTable::CreatedAt, sea_query::Order::Desc) + .limit(1) + .to_owned(), + ), + ) + .build_rusqlite(SqliteQueryBuilder); + + if let Ok(results) = tx.execute(sql.as_str(), &*values.as_params()) { + if results > 0 { + log::info!( + "removed {} older replaceable kind {} events for author: {:?}", + results, + event.kind.as_u32(), + event.pubkey.to_string() + ); + } + } + } + + // Insert parametrized replaceble event + if event.is_parameterized_replaceable() { + dbg!("deleting older parametrized replaceable event from events table"); + log::info!("deleting older parametrized replaceable event from events table"); + let d_tag = event.identifier(); + let (sql, values) = Query::delete() + .from_table(EventsTable::Table) + .and_where( + sea_query::Expr::col((EventsTable::Table, EventsTable::EventId)) + .in_subquery( + Query::select() + .from(EventsTable::Table) + .column((EventsTable::Table, 
EventsTable::EventId)) + .left_join( + TagsTable::Table, + sea_query::Expr::col(( + TagsTable::Table, + TagsTable::EventId, + )) + .equals((EventsTable::Table, EventsTable::EventId)), + ) + .and_where( + sea_query::Expr::col(( + EventsTable::Table, + EventsTable::Kind, + )) + .eq(kind), + ) + .and_where( + sea_query::Expr::col(( + EventsTable::Table, + EventsTable::Pubkey, + )) + .eq(&pubkey), + ) + .and_where( + sea_query::Expr::col(( + TagsTable::Table, + TagsTable::Tag, + )) + .eq("d"), + ) + .and_where( + sea_query::Expr::col(( + TagsTable::Table, + TagsTable::Value, + )) + .eq(d_tag), + ) + .to_owned(), + ), + ) + .build_rusqlite(SqliteQueryBuilder); + + if let Ok(results) = tx.execute(sql.as_str(), &*values.as_params()) { + if results > 0 { + log::info!("removed {} older parameterized replaceable kind {} events for author: {:?}", results, event.kind, event.pubkey); + } + } + } + + if event.kind == nostr::Kind::EventDeletion { + // Delete from Events + let (sql, values) = Query::delete() + .from_table(EventsTable::Table) + .and_where(sea_query::Expr::col(EventsTable::Kind).ne(5)) + .and_where(sea_query::Expr::col(EventsTable::Pubkey).eq(&pubkey)) + .and_where(sea_query::Expr::col(EventsTable::EventId).is_in(&ids)) + .build_rusqlite(SqliteQueryBuilder); + + if let Err(err) = tx.execute(sql.as_str(), &*values.as_params()) { + tx.rollback().unwrap(); + + return Ok(false); + } + + // Delete from EventsFTS + let (sql, values) = Query::delete() + .from_table(EventsFTSTable::Table) + .and_where(sea_query::Expr::col(EventsFTSTable::EventId).is_in(ids)) + .build_rusqlite(SqliteQueryBuilder); + + if let Err(err) = tx.execute(sql.as_str(), &*values.as_params()) { + tx.rollback().unwrap(); + + return Ok(false); + } + } else { + log::debug!("inserting new event in events"); + // Insert into Events table + let (sql, values) = Query::insert() + .into_table(EventsTable::Table) + .columns([ + EventsTable::EventId, + EventsTable::Content, + EventsTable::Kind, + 
EventsTable::Pubkey, + EventsTable::CreatedAt, + EventsTable::Tags, + EventsTable::Sig, + ]) + .values_panic([ + id.clone().into(), + content.clone().into(), + kind.into(), + pubkey.into(), + created_at.into(), + tags.into(), + sig.into(), + ]) + .build_rusqlite(SqliteQueryBuilder); + + if let Err(err) = tx.execute(sql.as_str(), &*values.as_params()) { + log::error!("Error inserting event into 'events' table: {}", err); + tx.rollback().unwrap(); + + return Ok(false); + }; + + // Insert into EventsFTS table + log::debug!("inserting new event into eventsFTS"); + let (sql, values) = Query::insert() + .into_table(EventsFTSTable::Table) + .columns([EventsFTSTable::EventId, EventsFTSTable::Content]) + .values_panic([id.clone().into(), content.into()]) + .build_rusqlite(SqliteQueryBuilder); + + if let Err(err) = tx.execute(sql.as_str(), &*values.as_params()) { + log::error!("Error inserting event into 'eventsFTS' table: {}", err); + tx.rollback().unwrap(); + + return Ok(false); + } + + // Insert into Tags table + log::debug!("inserting new event into tags"); + for tag in event.tags.clone() { + let tag = tag.to_vec(); + if tag.len() >= 2 { + let tag_name = &tag[0]; + let tag_value = &tag[1]; + if tag_name.len() == 1 { + let (sql, values) = Query::insert() + .into_table(TagsTable::Table) + .columns([TagsTable::Tag, TagsTable::Value, TagsTable::EventId]) + .values_panic([ + tag_name.into(), + tag_value.into(), + id.clone().into(), + ]) + .build_rusqlite(SqliteQueryBuilder); + + if let Err(err) = tx.execute(sql.as_str(), &*values.as_params()) { + log::error!("Error inserting event into 'tags' table: {}", err); + tx.rollback().unwrap(); + + return Ok(false); + } + } + } + } + } + + match tx.commit() { + Ok(_) => Ok(true), + Err(err) => { + log::error!("Error during transaction commit: {}", err); + Ok(false) + } + } + }) + .await else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + event_saved + } + + async fn 
has_event_already_been_saved(&self, event_id: &EventId) -> Result { + match self.get_event_by_id(*event_id).await { + Ok(_) => Ok(true), + Err(_) => Ok(false), + } + } + + async fn has_event_id_been_deleted(&self, event_id: &EventId) -> Result { + match self.get_event_by_id(*event_id).await { + Ok(_) => Ok(false), + Err(_) => Ok(true), + } + } + + /// Skip events that are older than 10 minutes + fn event_is_too_old(&self, event: &Event) -> bool { + if chrono::Utc::now().timestamp() - 600 > event.created_at.as_i64() { + return true; + } + false + } + + async fn set_event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), Error> { + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + + let event_id = event_id.to_string(); + let relay_url = relay_url.to_string(); + + let Ok(query_result) = connection + .interact(|conn| -> Result { + let (sql, values) = Query::insert() + .into_table(EventSeenByRelaysTable::Table) + .columns([ + EventSeenByRelaysTable::EventId, + EventSeenByRelaysTable::RelayURL, + ]) + .values_panic([event_id.into(), relay_url.into()]) + .build_rusqlite(SqliteQueryBuilder); + + match conn.execute(sql.as_str(), &*values.as_params()) { + Ok(updated_rows) => Ok(updated_rows), + Err(err) => Err(Error::internal(err.into())), + } + }) + .await + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + if let Ok(event_id_inserted) = query_result { + if event_id_inserted > 0 { + return Ok(()); } - } - tx.commit().await.unwrap(); - - log::info!("[SQLite] add_event completed"); - Ok(message) - } - - async fn index_search(&self, event: Box) -> Result<(), Error> { - let id = event.id.to_string(); - let content = event.content.to_string(); - let (sql, values) = Query::insert() - .into_table(EventsFTSTable::Table) - .columns([EventsFTSTable::EventId, EventsFTSTable::Content]) - .values_panic([id.into(), content.into()]) - 
.build_sqlx(sea_query::SqliteQueryBuilder); - - let results = sqlx::query_with(&sql, values).execute(&self.pool).await; - if results.is_ok() { - Ok(()) + Err(Error::internal_with_message("Failed to insert new record")) } else { - Err(Error::internal_with_message( - "Unable to write event to events_fts index", - )) + Err(Error::internal_with_message("Failed to insert new record")) } } - async fn index_tags(&self, event: Box) -> Result<(), Error> { - // let t: Vec = Vec::new(); - // for tag in event.tags { - // tag.kind() - // } + async fn get_event_seen_on_relays( + &self, + event_id: EventId, + ) -> Result>, Error> { + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + let event_id = event_id.to_string(); - Ok(()) + let Ok(query_result) = connection + .interact(|conn| -> Result>, Error> { + let (sql, values) = Query::select() + .from(EventSeenByRelaysTable::Table) + .column(EventSeenByRelaysTable::RelayURL) + .and_where(sea_query::Expr::col(EventSeenByRelaysTable::EventId).eq(event_id)) + .build_rusqlite(SqliteQueryBuilder); + + let mut stmt = conn.prepare(sql.as_str()).unwrap(); + let mut rows = stmt.query(&*values.as_params()).unwrap(); + + let mut urls: HashSet = HashSet::new(); + while let Ok(Some(record)) = rows.next() { + if let Ok(url_string) = record.get::<_, String>("relay_url") { + if let Ok(relay_url) = Url::from_str(&url_string) { + urls.insert(relay_url); + } + } + } + + if !urls.is_empty() { + return Ok(Some(urls)); + } + + Ok(None) + }) + .await + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + let Ok(found_relay_urls) = query_result else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + Ok(found_relay_urls) + } + + async fn has_event_already_been_seen(&self, event_id: &EventId) -> Result { + let Ok(connection) = self.get_connection().await else { + return 
Err(Error::internal_with_message("Unable to get DB connection")); + }; + + let event_id = event_id.to_string(); + + let Ok(query_result) = connection + .interact(|conn| -> Result { + let (sql, values) = Query::select() + .expr_as( + sea_query::Expr::exists( + Query::select() + .column(EventSeenByRelaysTable::EventId) + .from(EventSeenByRelaysTable::Table) + .and_where( + sea_query::Expr::col(EventSeenByRelaysTable::EventId) + .eq(event_id), + ) + .limit(1) + .take(), + ), + sea_query::Alias::new("event_exists"), + ) + .build_rusqlite(SqliteQueryBuilder); + + let mut stmt = conn.prepare(sql.as_str()).unwrap(); + let mut rows = stmt.query(&*values.as_params()).unwrap(); + + let exists = match rows.next() { + Ok(_) => 1, + Err(_) => 0, + }; + + Ok(exists == 1) + }) + .await + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + if let Ok(result) = query_result { + return Ok(result); + } + + Ok(false) + } + + async fn admin_delete_events(&self, event: &Event) -> Result { + let event_ids: Vec = event.event_ids().map(|e| e.to_string()).collect(); + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + let Ok(query_result) = connection + .interact( + |conn: &mut rusqlite::Connection| -> Result { + let (sql, values) = Query::delete() + .from_table(EventsTable::Table) + .and_where(sea_query::Expr::col(EventsTable::EventId).is_in(event_ids)) + .build_rusqlite(SqliteQueryBuilder); + + match conn.execute(sql.as_str(), &*values.as_params()) { + Ok(affected_rows) => { + let message = format!("{} events deleted", affected_rows); + Ok(RelayMessage::notice(message)) + } + Err(err) => { + log::error!("[admin_delete_events] Failed to execute query: {}", err); + + Ok(RelayMessage::notice("unable to delete events")) + } + } + }, + ) + .await + else { + log::error!("[admin_delete_events] Failed to execute query"); + return Err(Error::internal_with_message( + "Failed to 
execute query 'admin_delete_events'", + )); + }; + + let Ok(relay_message) = query_result else { + let message = RelayMessage::Notice { + message: "unable to acquire pool connection".to_string(), + }; + return Ok(message); + }; + + Ok(relay_message) + } + + async fn get_event_by_id(&self, event_id: EventId) -> Result { + let eid = event_id.to_string(); + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + let Ok(query_result) = connection + .interact(|conn: &mut rusqlite::Connection| -> Result { + let (sql, value) = Query::select() + .from(EventsTable::Table) + .columns([ + EventsTable::EventId, + EventsTable::Content, + EventsTable::Kind, + EventsTable::Pubkey, + EventsTable::CreatedAt, + EventsTable::Tags, + EventsTable::Sig, + ]) + .and_where(sea_query::Expr::col(EventsTable::EventId).eq(eid)) + .limit(1) + .build_rusqlite(SqliteQueryBuilder); + + let mut stmt = conn.prepare(sql.as_str()).unwrap(); + let row = stmt + .query_row::(&*value.as_params(), |row| Ok(EventRow::from(row))) + .unwrap(); + + let event = row.clone().to_event(); + + Ok(event) + }) + .await + else { + return Err(Error::internal_with_message( + "Failed to execute query 'get_event_by_id'", + )); + }; + + let Ok(event) = query_result else { + return Err(Error::internal_with_message( + "Failed to get event from row in 'get_event_by_id'", + )); + }; + + Ok(event) + } + + async fn wipe(&self) -> Result<(), Error> { + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + + match connection + .interact(|conn| -> Result<(), Error> { + let Ok(result) = conn.set_db_config( + rusqlite::config::DbConfig::SQLITE_DBCONFIG_RESET_DATABASE, + true, + ) else { + return Err(Error::internal_with_message( + "Failed to set SQLITE_DBCONFIG_RESET_DATABASE to true", + )); + }; + let Ok(result) = conn.execute("VACUUM;", []) else { + return 
Err(Error::internal_with_message("Failed to run VACUUM command")); + }; + let Ok(result) = conn.set_db_config( + rusqlite::config::DbConfig::SQLITE_DBCONFIG_RESET_DATABASE, + false, + ) else { + return Err(Error::internal_with_message( + "Failed to set SQLITE_DBCONFIG_RESET_DATABASE to false", + )); + }; + + Ok(()) + }) + .await + { + Ok(_) => { + if NostrSqlite::run_migrations(&self.pool).await { + return Ok(()); + } + + Err(Error::internal_with_message("Failed to run migrations")) + } + Err(err) => Err(Error::internal_with_message(err.to_string())), + } } fn get_filter_query(&self, filter: &nostr::Filter) -> sea_query::SelectStatement { @@ -632,9 +919,12 @@ impl SqliteDb { query } - fn get_filters_query(&self, subscription: Subscription) -> Option { - subscription - .filters + fn get_filters_query( + &self, + filters: Vec, + order: SqOrder, + ) -> Option { + filters .iter() .map(|filter| { Query::select() @@ -651,67 +941,261 @@ impl SqliteDb { self.get_filter_query(filter), sea_query::Alias::new("events"), ) + .order_by(EventsTable::CreatedAt, order.to_owned()) .to_owned() }) .reduce(|mut result, query| result.union(sea_query::UnionType::All, query).to_owned()) } - async fn admin_delete_events(&self, event: Box) -> Result { - let event_ids: Vec = event.event_ids().map(|e| e.to_string()).collect(); - let (sql, values) = Query::delete() - .from_table(EventsTable::Table) - .and_where(sea_query::Expr::col(EventsTable::EventId).is_in(event_ids)) - .build_sqlx(sea_query::SqliteQueryBuilder); + async fn query(&self, filters: Vec, order: Order) -> Result, Error> { + log::debug!("making query from filters..."); - match sqlx::query_with(&sql, values).execute(&self.pool).await { - Ok(affected_rows) => { - let message = RelayMessage::Notice { - message: format!("{} events deleted", affected_rows.rows_affected()), - }; - Ok(message) - } - Err(e) => { - log::error!("[admin_delete_events] Failed to execute query: {}", e); - let message = RelayMessage::Notice { - message: 
"unable to delete events".to_string(), - }; - Ok(message) - } - } + // let event_ids: Vec = event.event_ids().map(|e| e.to_string()).collect(); + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + + let sq_order = match order { + Order::Asc => SqOrder::Asc, + Order::Desc => SqOrder::Desc, + }; + let Some(sql_statement) = self.get_filters_query(filters, sq_order) else { + return Err(Error::internal_with_message("Failed to build SQL Query")); + }; + + let Ok(query_result) = connection + .interact(move |conn| -> Result, Error> { + let (sql, values) = sql_statement.build_rusqlite(SqliteQueryBuilder); + + let mut stmt = conn.prepare(sql.as_str()).unwrap(); + let mut rows = stmt.query(&*values.as_params()).unwrap(); + + let mut event_vec: Vec = vec![]; + while let Ok(Some(row)) = rows.next() { + let event = EventRow::from(row).to_event(); + event_vec.push(event); + } + + Ok(event_vec) + }) + .await + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + query_result } - async fn count_events_by_filters(&self, subscription: Subscription) -> i32 { - if subscription.filters.is_empty() { - return 0; - } + async fn count(&self, filters: Vec) -> Result { + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; - let (sql, values) = self - .get_filters_query(subscription) - .unwrap() - .clear_selects() - .expr_as( - sea_query::Func::count(sea_query::Expr::col(( - EventsTable::Table, - EventsTable::EventId, - ))), - sea_query::Alias::new("count"), - ) - .build_sqlx(sea_query::SqliteQueryBuilder); + let Some(mut sql_statement) = self.get_filters_query(filters, SqOrder::Desc) else { + return Err(Error::internal_with_message("Failed to build SQL Query")); + }; - println!("count_filters SEA_QUERY built SQL: {}", sql.clone()); + let Ok(query_result) = connection + .interact(move |conn| 
{ + let (sql, values) = sql_statement + .clear_selects() + .expr_as( + sea_query::Func::count(sea_query::Expr::col(( + EventsTable::Table, + EventsTable::EventId, + ))), + sea_query::Alias::new("count"), + ) + .build_rusqlite(SqliteQueryBuilder); - let counts = sqlx::query_as_with::<_, EventsCountRow, _>(&sql, values) - .fetch_one(&self.pool) + if let Ok(result) = conn.query_row(sql.as_str(), &*values.as_params(), |row| { + let count: usize = row.get(0).unwrap(); + Ok(count) + }) { + return Ok(result); + } + + Err(Error::internal_with_message( + "Failed to get event counts by filters", + )) + }) .await - .unwrap(); + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; - dbg!(counts); + query_result + } - 1 + async fn event_ids_by_filters( + &self, + filters: Vec, + order: Order, + ) -> Result, Error> { + let Ok(connection) = self.get_connection().await else { + return Err(Error::internal_with_message("Unable to get DB connection")); + }; + + let sq_order = match order { + Order::Asc => SqOrder::Asc, + Order::Desc => SqOrder::Desc, + }; + + let Some(mut sql_statement) = self.get_filters_query(filters, sq_order) else { + return Err(Error::internal_with_message("Failed to build SQL Query")); + }; + + let Ok(query_result) = connection + .interact(move |conn| { + let (sql, values) = sql_statement + .clear_selects() + .column(EventsTable::EventId) + .build_rusqlite(SqliteQueryBuilder); + + let mut stmt = conn.prepare(sql.as_str()).unwrap(); + let mut rows = stmt.query(&*values.as_params()).unwrap(); + + let mut event_vec: Vec = vec![]; + while let Ok(Some(row)) = rows.next() { + let event_id_string: String = row.get(0).unwrap(); + let event_id = EventId::from_str(&event_id_string).unwrap(); + event_vec.push(event_id); + } + + Ok(event_vec) + }) + .await + else { + return Err(Error::internal_with_message("Failed to execute query")); + }; + + query_result + } + + async fn has_coordinate_been_deleted( + &self, + coordinate: &Coordinate, + 
timestamp: Timestamp, + ) -> Result { + Ok(true) } } -impl Noose for SqliteDb { +impl From for Error { + fn from(value: nostr_database::DatabaseError) -> Self { + Error::internal_with_message(value.to_string()) + } +} + +impl From for nostr_database::DatabaseError { + fn from(val: Error) -> Self { + nostr_database::DatabaseError::backend(val) + } +} + +#[async_trait] +impl NostrDatabase for NostrSqlite { + type Err = Error; + + /// Name of the backend database used (ex. lmdb, sqlite, indexeddb, ...) + fn backend(&self) -> Backend { + Backend::SQLite + } + + /// Database options + fn opts(&self) -> DatabaseOptions { + DatabaseOptions { events: true } + } + + /// Save [`Event`] into store + /// + /// Return `true` if event was successfully saved into database. + /// + /// **This method assume that [`Event`] was already verified** + async fn save_event(&self, event: &Event) -> Result { + self.save_event(event).await + } + + /// Check if [`Event`] has already been saved + async fn has_event_already_been_saved(&self, event_id: &EventId) -> Result { + self.has_event_already_been_saved(event_id).await + } + + /// Check if [`EventId`] has already been seen + async fn has_event_already_been_seen(&self, event_id: &EventId) -> Result { + self.has_event_already_been_seen(event_id).await + } + + /// Check if [`EventId`] has been deleted + async fn has_event_id_been_deleted(&self, event_id: &EventId) -> Result { + self.has_event_id_been_deleted(event_id).await + } + + /// Check if event with [`Coordinate`] has been deleted before [`Timestamp`] + async fn has_coordinate_been_deleted( + &self, + coordinate: &Coordinate, + timestamp: Timestamp, + ) -> Result { + todo!() + } + + /// Set [`EventId`] as seen by relay + /// + /// Useful for NIP65 (aka gossip) + async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), Self::Err> { + self.set_event_id_seen(event_id, relay_url).await + } + + /// Get list of relays that have seen the [`EventId`] + async fn 
event_seen_on_relays( + &self, + event_id: EventId, + ) -> Result>, Self::Err> { + self.get_event_seen_on_relays(event_id).await + } + + /// Get [`Event`] by [`EventId`] + async fn event_by_id(&self, event_id: EventId) -> Result { + self.get_event_by_id(event_id).await + } + + /// Count number of [`Event`] found by filters + /// + /// Use `Filter::new()` or `Filter::default()` to count all events. + async fn count(&self, filters: Vec) -> Result { + self.count(filters).await + } + + /// Query store with filters + async fn query(&self, filters: Vec, order: Order) -> Result, Self::Err> { + self.query(filters, order).await + } + + /// Get event IDs by filters + async fn event_ids_by_filters( + &self, + filters: Vec, + order: Order, + ) -> Result, Self::Err> { + self.event_ids_by_filters(filters, order).await + } + + /// Get `negentropy` items + async fn negentropy_items( + &self, + filter: Filter, + ) -> Result, Self::Err> { + todo!() + } + + /// Wipe all data + async fn wipe(&self) -> Result<(), Self::Err> { + self.wipe().await + } +} + +impl Noose for NostrSqlite { async fn start(&mut self, pubsub: Arc) -> Result<(), Error> { let mut subscriber = pubsub.subscribe(channels::MSG_NOOSE).await; @@ -724,13 +1208,15 @@ impl Noose for SqliteDb { }, Command::DbReqFindEvent(client_id, subscriptioin) => { match self.find_event(subscriptioin).await { - Ok(events) => Command::DbResRelayMessage(client_id, events), + Ok(relay_messages) => { + Command::DbResRelayMessages(client_id, relay_messages) + } Err(e) => Command::ServiceError(e), } } - Command::DbReqDeleteEvents(client_id, event_ids) => { - match self.delete_events(event_ids).await { - Ok(status) => Command::DbResOkWithStatus(client_id, status), + Command::DbReqEventCounts(client_id, subscriptioin) => { + match self.counts(subscriptioin).await { + Ok(relay_message) => Command::DbResEventCounts(client_id, relay_message), Err(e) => Command::ServiceError(e), } } @@ -756,190 +1242,235 @@ impl Noose for SqliteDb { Ok(()) } - 
async fn migration_up(&self) { - SqliteDb::migrate(&self.pool).await; - } - - async fn write_event(&self, event: Box) -> Result { - log::info!("[Noose] write_event triggered"); - - let status = self.add_event(event).await.unwrap(); - - log::info!("[Noose] write event completed: {}", status.as_json()); - Ok(status) - } - - async fn delete_events(&self, event: Box) -> Result { - log::debug!("[Noose] delete_filters triggered"); - - let status = self.admin_delete_events(event).await.unwrap(); - - Ok(status) - } - - async fn find_event(&self, subscription: Subscription) -> Result, Error> { - log::debug!("making query from filters..."); - - let eose_message = - vec![nostr::RelayMessage::EndOfStoredEvents(subscription.id.clone()).as_json()]; - - if let Some(sql_statement) = self.get_filters_query(subscription.clone()) { - let (sql, values) = sql_statement.build_sqlx(sea_query::SqliteQueryBuilder); - log::info!("SEA_QUERY built SQL: {}", sql.clone()); - - match sqlx::query_as_with::<_, EventRow, _>(&sql, values) - .fetch_all(&self.pool) - .await - { - Ok(rows) => { - if rows.is_empty() { - return Ok(eose_message); - } else { - let relay_messages: Vec = rows - .iter() - .map(|row| row.to_string(subscription.id.clone())) - .collect(); - return Ok(relay_messages); - } - } - Err(e) => { - log::error!("{}", e); - return Err(Error::internal(e.into())); - } + async fn write_event(&self, event: Box) -> Result { + // TODO: Maybe do event validation and admin deletions here + match self.save_event(&event).await { + Ok(status) => { + let relay_message = nostr::RelayMessage::ok(event.id, status, ""); + Ok(relay_message) } + Err(err) => Err(Error::bad_request(err.to_string())), } + } - Ok(eose_message) + async fn find_event( + &self, + subscription: Subscription, + ) -> Result, Error> { + match self.query(subscription.filters, Order::Desc).await { + Ok(events) => { + let relay_messages = events + .into_iter() + .map(|event| nostr::RelayMessage::event(subscription.id.clone(), event)) + 
.collect(); + Ok(relay_messages) + } + Err(err) => Err(Error::bad_request(err.to_string())), + } + } + + async fn counts(&self, subscription: Subscription) -> Result { + match self.count(subscription.filters).await { + Ok(counts) => { + let relay_message = nostr::RelayMessage::count(subscription.id, counts); + Ok(relay_message) + } + Err(err) => Err(Error::internal_with_message(err.to_string())), + } } } #[cfg(test)] mod tests { - use super::Noose; - use super::SqliteDb; - use crate::utils::structs::Subscription; - - use nostr::key::FromSkStr; - use nostr::util::JsonUtil; + use crate::noose::sqlite::*; + use nostr::EventBuilder; #[tokio::test] - async fn find_event() { - let db = SqliteDb::new().await; + async fn create_db() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; - let t = std::time::Instant::now(); - let client_id = "test_id".to_string(); - - let cm = nostr::ClientMessage::from_json( - r#"["REQ","7b9bc4b6-701c-40b6-898f-4e7c6b5b1510",{"authors":["04c915daefee38317fa734444acee390a8269fe5810b2241e5e6dd343dfbecc9"],"kinds":[0]}]"#, - ).unwrap(); - let (sub_id, filters) = match cm { - nostr::ClientMessage::Req { - subscription_id, - filters, - } => (subscription_id, filters), - _ => panic!("sneed :("), - }; - let sub = Subscription::new(sub_id, filters); - db.find_event(sub).await.unwrap(); - println!( - "Time passed: {}", - (std::time::Instant::now() - t).as_millis() - ); + assert_eq!(db.pool.status().max_size, 64); } #[tokio::test] - async fn admin_delete_events() { - let db = SqliteDb::new().await; + async fn event_row_to_event() { + let event_id = + "076b27df50401f2af598cb727281e1e36401592570052c0fb37543f50aba5b69".to_string(); + let event_row = EventRow { + id: event_id.clone(), + pubkey: "da9cfc5b7644aae5b3e7df1d224ef4d4a206d66d6e3b5c99a8aeda3b642c4cef".to_string(), + created_at: 1706020262, + kind: 1, + tags: "[]".to_string(), + sig: 
"7f92a6ebdeae4a9bd7a6e19f57b49e2c499fece644c63c6ff5776bdbafec5c524cfad1eecedef11a8701df2a826453914d39a897e0f8276a6ffc40e266cae059".to_string(), + content: "hoh".to_string(), + }; - let admin_pubkey = "npub14d2a54za7dnfzktle40vw7kdx48vk3ljy3t7w7sdpk3segea65mq2t6kc4"; - let admin_secret = "nsec1rayezcsw7txmtu3smpsgs7m5fa3dazhx6lhdm44dxclveplhajpsalyx2l"; + let event = event_row.to_event(); - let admin_keys = nostr::Keys::from_sk_str(admin_secret).unwrap(); - - let event_1 = nostr::EventBuilder::new(nostr::Kind::TextNote, "this is event 1", vec![]) - .to_event(&nostr::Keys::generate()) - .unwrap(); - let event_2 = nostr::EventBuilder::new(nostr::Kind::TextNote, "this is event 2", vec![]) - .to_event(&nostr::Keys::generate()) - .unwrap(); - - let res = db.add_event(Box::new(event_1.clone())).await.unwrap(); - let res = db.add_event(Box::new(event_2.clone())).await.unwrap(); - - let e_ids = vec![event_1.id, event_2.id]; - let event = nostr::EventBuilder::delete(e_ids) - .to_event(&admin_keys) - .unwrap(); - - let message = db.admin_delete_events(Box::new(event)).await.unwrap(); - assert_eq!(message.as_json(), "[\"NOTICE\",\"2 events deleted\"]"); + assert_eq!(event.id.to_string(), event_id); } #[tokio::test] - async fn delete_events() { - let db = SqliteDb::new().await; + async fn get_event_by_id() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; - let t = std::time::Instant::now(); - let client_id = "test_id".to_string(); - - let my_keys = nostr::Keys::generate(); - - let eid = nostr::EventId::all_zeros(); - let tag_event = nostr::Tag::Event { - event_id: eid, - relay_url: None, - marker: None, - }; - let tag_url = nostr::Tag::AbsoluteURL(nostr::types::UncheckedUrl::new( - "http://foo.net".to_string(), - )); - let tag_hashtag = nostr::Tag::Hashtag("farm".to_string()); - - let event = nostr::EventBuilder::new_text_note( - "sneed feed and seed", - vec![tag_event, tag_url, tag_hashtag], - ) - .to_event(&my_keys) - .unwrap(); - - 
dbg!(&event.as_json()); - let resp = db.add_event(Box::new(event.clone())).await.unwrap(); - dbg!(resp); - - let delete_event = nostr::EventBuilder::delete(vec![event.id]) - .to_event(&my_keys) + let keys = nostr::Keys::generate(); + let event = EventBuilder::new(nostr::Kind::TextNote, "Hello", vec![]) + .to_event(&keys) .unwrap(); - dbg!(&delete_event); - let resp = db.add_event(Box::new(delete_event.clone())).await.unwrap(); - dbg!(resp); + // Insert event + let result = db.save_event(&event).await.unwrap(); + assert!(result); + + // Get event by id + let eid = event.id; + let result = db.get_event_by_id(eid).await.unwrap(); + + assert_eq!(result.id, eid); } #[tokio::test] - async fn count_events() { - let db = SqliteDb::new().await; + async fn event_seen_by_other_relays() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; - let t = std::time::Instant::now(); - let client_id = "test_id".to_string(); + let keys = nostr::Keys::generate(); + let event = EventBuilder::new(nostr::Kind::TextNote, "Hello to Other relay", vec![]) + .to_event(&keys) + .unwrap(); - let cm = nostr::ClientMessage::from_json( - r#"["COUNT","7b9bc4b6-701c-40b6-898f-4e7c6b5b1510",{"authors":["6be3c1446231fe6d117d72e29b60094bbb3eec029100c34f627dc4ebe8369a64"],"kinds":[1]}]"#, - ).unwrap(); - let (sub_id, filters) = match cm { - nostr::ClientMessage::Count { - subscription_id, - filters, - } => (subscription_id, filters), - _ => panic!("sneed :("), - }; + // Insert event + let result = db.save_event(&event).await.unwrap(); + assert!(result); - let sub = Subscription::new(sub_id, filters); - let num = db.count_events_by_filters(sub).await; - println!( - "Time passed: {}", - (std::time::Instant::now() - t).as_millis() - ); + // Set seen on other relays + let url_0 = Url::from_str("wss://relay.zhitno.st").unwrap(); + db.set_event_id_seen(event.id, url_0).await.unwrap(); - assert_eq!(num, 1); + let url_1 = Url::from_str("wss://relay.damus.io").unwrap(); + 
db.set_event_id_seen(event.id, url_1).await.unwrap(); + + // Get status of seen event + let result = db.get_event_seen_on_relays(event.id).await.unwrap(); + + dbg!(result); + + // Get event by id + let eid = event.id; + let result = db.get_event_by_id(eid).await.unwrap(); + + assert_eq!(result.id, eid); + } + + #[tokio::test] + async fn has_event_already_been_seen() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; + + let keys = nostr::Keys::generate(); + let event = EventBuilder::new(nostr::Kind::TextNote, "Hello to Other relay", vec![]) + .to_event(&keys) + .unwrap(); + + // Set seen on other relays + let url_0 = Url::from_str("wss://relay.zhitno.st").unwrap(); + db.set_event_id_seen(event.id, url_0).await.unwrap(); + + let url_1 = Url::from_str("wss://relay.damus.io").unwrap(); + db.set_event_id_seen(event.id, url_1).await.unwrap(); + + // Get status of seen event + let result = db.has_event_already_been_seen(&event.id).await.unwrap(); + + assert!(result); + } + + #[tokio::test] + async fn wipe_db() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; + + db.wipe().await.unwrap(); + } + + #[tokio::test] + async fn query() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; + + let keys = nostr::Keys::generate(); + let event = EventBuilder::new(nostr::Kind::TextNote, "Hello Filters", vec![]) + .to_event(&keys) + .unwrap(); + + // Insert event + let result = db.save_event(&event).await.unwrap(); + assert!(result); + + // Get event vec from filters + let filter = Filter::new().author(event.pubkey); + let filters = vec![filter]; + let result = db.query(filters, Order::Desc).await.unwrap(); + + assert!(!result.is_empty()); + assert_eq!(result[0].pubkey, event.pubkey); + } + + #[tokio::test] + async fn count() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; + + let keys = nostr::Keys::generate(); + let 
event_0 = EventBuilder::new(nostr::Kind::TextNote, "Hello Count", vec![]) + .to_event(&keys) + .unwrap(); + let event_1 = EventBuilder::new(nostr::Kind::TextNote, "Goodbye Count", vec![]) + .to_event(&keys) + .unwrap(); + + // Insert events + let result = db.save_event(&event_0).await.unwrap(); + assert!(result); + let result = db.save_event(&event_1).await.unwrap(); + assert!(result); + + // Get event vec from filters + let filter = Filter::new().author(event_0.pubkey); + let filters = vec![filter]; + let result = db.count(filters).await.unwrap(); + + assert_eq!(result, 2); + } + + #[tokio::test] + async fn event_ids_by_filter() { + let config = Arc::new(ServiceConfig::new()); + let db = NostrSqlite::new(config).await; + + let keys = nostr::Keys::generate(); + let event_0 = EventBuilder::new(nostr::Kind::TextNote, "Hello Count", vec![]) + .to_event(&keys) + .unwrap(); + let event_1 = EventBuilder::new(nostr::Kind::TextNote, "Goodbye Count", vec![]) + .to_event(&keys) + .unwrap(); + + // Insert events + let result = db.save_event(&event_0).await.unwrap(); + assert!(result); + let result = db.save_event(&event_1).await.unwrap(); + assert!(result); + + // Get event vec from filters + let filter = Filter::new().author(event_0.pubkey); + let filters = vec![filter]; + let result = db.event_ids_by_filters(filters, Order::Desc).await.unwrap(); + + assert_eq!(result.len(), 2); } } diff --git a/src/relay/ws.rs b/src/relay/ws.rs index be8d512..7401f21 100644 --- a/src/relay/ws.rs +++ b/src/relay/ws.rs @@ -67,17 +67,26 @@ pub async fn client_connection( } // } } - crate::bussy::Command::DbResRelayMessage(client_id, events) => { + crate::bussy::Command::DbResRelayMessages(client_id, relay_messages) => { if client.client_id == client_id { if let Some(sender) = &client.client_connection { if !sender.is_closed() { - for event in events { - sender.send(Ok(Message::text(event))).unwrap(); + for message in relay_messages { + 
sender.send(Ok(Message::text(message.as_json()))).unwrap(); } } } } } + crate::bussy::Command::DbResEventCounts(client_id, relay_message) => { + if client.client_id == client_id { + if let Some(sender) = &client.client_connection { + if !sender.is_closed() { + sender.send(Ok(Message::text(relay_message.as_json()))).unwrap(); + } + } + } + } crate::bussy::Command::DbResOkWithStatus(client_id, status) => { if client.client_id == client_id { if let Some(sender) = &client.client_connection { @@ -177,7 +186,7 @@ async fn socket_on_message(context: &Context, client: &mut Client, msg: Message) Err(e) => { log::error!("error while parsing client message request: {}", e); - let response = nostr::RelayMessage::new_notice("Invalid message"); + let response = nostr::RelayMessage::notice("Invalid message"); let message = Message::text(response.as_json()); send(client, message); @@ -212,7 +221,7 @@ async fn handle_msg(context: &Context, client: &mut Client, client_message: Clie ClientMessage::Count { subscription_id, filters, - } => handle_count(client, subscription_id, filters).await, + } => handle_count(context, client, subscription_id, filters).await, ClientMessage::Close(subscription_id) => handle_close(client, subscription_id).await, ClientMessage::Auth(event) => handle_auth(client, event).await, _ => (), @@ -224,7 +233,7 @@ async fn handle_event(context: &Context, client: &Client, event: Box) { if let Err(err) = event.verify() { let relay_message = - nostr::RelayMessage::new_ok(event.id, false, "Failed to verify event signature"); + nostr::RelayMessage::ok(event.id, false, "Failed to verify event signature"); let message = crate::bussy::Message { source: channels::MSG_RELAY, content: crate::bussy::Command::PipelineResRelayMessageOk( @@ -253,7 +262,7 @@ async fn handle_req( client: &mut Client, subscription_id: SubscriptionId, filters: Vec, -) { +) { let subscription = Subscription::new(subscription_id.clone(), filters); let needs_historical_events = 
subscription.needs_historical_events(); @@ -263,6 +272,7 @@ async fn handle_req( client.ip(), &subscription_error.message ); + let message = format!( "[\"CLOSED\", \"{}\", \"{}\"]", subscription_id, subscription_error.message @@ -282,6 +292,7 @@ async fn handle_req( return; }; + log::info!("[SUBSCRIPTION] needs historical events"); if needs_historical_events { context .pubsub @@ -296,12 +307,24 @@ async fn handle_req( } } -async fn handle_count(client: &Client, subscription_id: SubscriptionId, filters: Vec) { - // context.pubsub.send(new nostr event) then handle possible errors +async fn handle_count( + context: &Context, + client: &Client, + subscription_id: SubscriptionId, + filters: Vec, +) { let subscription = Subscription::new(subscription_id, filters); - let message = Message::text("COUNT not implemented"); - send(client, message); + context + .pubsub + .publish( + channels::MSG_NOOSE, + crate::bussy::Message { + source: channels::MSG_RELAY, + content: crate::bussy::Command::DbReqEventCounts(client.client_id, subscription), + }, + ) + .await } async fn handle_close(client: &mut Client, subscription_id: SubscriptionId) { diff --git a/src/utils/config.rs b/src/utils/config.rs index 662ac4d..287722b 100644 --- a/src/utils/config.rs +++ b/src/utils/config.rs @@ -1,8 +1,11 @@ +use std::path::PathBuf; + use nostr::{key::FromPkStr, secp256k1::XOnlyPublicKey}; #[derive(Clone, Debug)] pub struct Config { admin_pubkey: XOnlyPublicKey, + db_path: PathBuf, } impl Default for Config { @@ -13,32 +16,36 @@ impl Default for Config { impl Config { pub fn new() -> Self { - if let Ok(env_admin_pk) = std::env::var("ADMIN_PUBKEY") { - match nostr::Keys::from_pk_str(&env_admin_pk) { - Ok(admin_keys) => { - return Self { - admin_pubkey: admin_keys.public_key(), - }; - } - Err(e) => { - panic!("Unable to parse ADMIN_PUBKEY: {}", e); - } - } - } + let admin_pubkey = std::env::var("ADMIN_PUBKEY") + .map(|env_pk| nostr::Keys::from_pk_str(&env_pk)) + .and_then(|result| 
result.map_err(|err| panic!("{}", err))) + .unwrap() + .public_key(); - panic!("Environment variable ADMIN_PUBKEY not defined"); + let db_path = std::env::var("DATABASE_URL") + .map(PathBuf::from) + .unwrap(); + + Self { + admin_pubkey, + db_path, + } } pub fn get_admin_pubkey(&self) -> &XOnlyPublicKey { &self.admin_pubkey } + pub fn get_db_path(&self) -> PathBuf { + self.db_path.clone() + } + pub fn get_relay_config_json(&self) -> serde_json::Value { serde_json::json!({ "contact": "klink@zhitno.st", "name": "zhitno.st", "description": "Very *special* nostr relay", - "supported_nips": [ 1, 9, 11, 12, 15, 16, 20, 22, 28, 33 ], + "supported_nips": [ 1, 9, 11, 12, 15, 16, 20, 22, 28, 33, 45 ], "software": "git+https://git.zhitno.st/Klink/sneedstr.git", "version": "0.1.0" }) diff --git a/src/utils/error.rs b/src/utils/error.rs index 4f34cde..e50fbc5 100644 --- a/src/utils/error.rs +++ b/src/utils/error.rs @@ -4,6 +4,7 @@ use std::{ convert::From, fmt::{self, Display}, }; +use std::error::Error as StdError; use validator::ValidationErrors; use warp::{http::StatusCode, reject::Reject}; @@ -16,6 +17,16 @@ pub struct Error { pub sneedstr_version: Option, } +impl StdError for Error { + fn source(&self) -> Option<&(dyn StdError + 'static)> { + None + } + + fn cause(&self) -> Option<&dyn StdError> { + self.source() + } +} + impl Error { pub fn new(code: StatusCode, message: String) -> Self { Self { @@ -74,12 +85,12 @@ impl Error { } impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}: {}", self.status_code(), &self.message)?; - if let Some(val) = &self.sneedstr_version { - write!(f, "\ndiem ledger version: {}", val)?; - } - Ok(()) + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Error {{ code: {}, message: '{}', sneedstr_version: {:?} }}", + self.code, self.message, self.sneedstr_version + ) } } diff --git a/src/utils/mod.rs b/src/utils/mod.rs index f94bbcf..87a5861 100644 --- a/src/utils/mod.rs 
+++ b/src/utils/mod.rs @@ -1,7 +1,7 @@ pub mod crypto; pub mod error; pub mod filter; -// mod nostr_filter_helpers; +mod nostr_filter_helpers; pub mod config; pub mod rejection_handler; pub mod response; diff --git a/src/utils/nostr_filter_helpers.rs b/src/utils/nostr_filter_helpers.rs index 3b4a2de..a001dc9 100644 --- a/src/utils/nostr_filter_helpers.rs +++ b/src/utils/nostr_filter_helpers.rs @@ -1,27 +1,27 @@ -use nostr::{Event, Filter, Kind, Tag}; +use nostr::{key::FromPkStr, Event, Filter, Kind}; fn ids_match(filter: &Filter, event: &Event) -> bool { if filter.ids.is_empty() { - println!("[FILTER][IDS] skipped"); + log::info!("[FILTER][ids_match] skipped"); return true; } - println!( - "[FILTER][IDS] matched: {:?}", - filter.ids.iter().any(|id| id == &event.id.to_string()) + log::info!( + "[FILTER][ids_match] matched: {:?}", + filter.ids.iter().any(|id| id == &event.id) ); - filter.ids.iter().any(|id| id == &event.id.to_string()) + filter.ids.iter().any(|id| id == &event.id) } fn kind_match(filter: &Filter, kind: Kind) -> bool { if filter.kinds.is_empty() { - println!("[FILTER][KINDS] skipped"); + log::debug!("[FILTER][kind_match] skipped"); return true; } - println!( - "[FILTER][KIND] matched: {:?}", + log::debug!( + "[FILTER][kind_match] matched: {:?}", filter.kinds.iter().any(|k| k == &kind) ); @@ -29,122 +29,142 @@ fn kind_match(filter: &Filter, kind: Kind) -> bool { } fn pubkeys_match(filter: &Filter, event: &Event) -> bool { - if filter.pubkeys.is_empty() { - println!("[FILTER][PUBKEYS] skipped"); - return true; + log::debug!( + "[FILTER][pubkeys_match] matched: {:?}", + if let Some((p_tag, p_set)) = filter.generic_tags.get_key_value(&nostr::Alphabet::P) { + if p_set.is_empty() { + log::debug!("[FILTER][PUBKEYS] skipped"); + return true; + } + + return p_set.iter().any(|pk| match pk { + nostr::GenericTagValue::Pubkey(pk) => pk == &event.pubkey, + _ => false, + }); + } + ); + if let Some((p_tag, p_set)) = 
filter.generic_tags.get_key_value(&nostr::Alphabet::P) { + if p_set.is_empty() { + log::debug!("[FILTER][PUBKEYS] skipped"); + return true; + } + + return p_set.iter().any(|pk| match pk { + nostr::GenericTagValue::Pubkey(pk) => pk == &event.pubkey, + _ => false, + }); } - println!( - "[FILTER][PUBKEYS] matched: {:?}", - filter.pubkeys.iter().any(|pk| pk == &event.pubkey) - ); - filter.pubkeys.iter().any(|pk| pk == &event.pubkey) + false } fn authors_match(filter: &Filter, event: &Event) -> bool { - dbg!(filter); if filter.authors.is_empty() { - println!("[FILTER][AUTHORS] skipped"); + log::debug!("[FILTER][authors_match] skipped"); return true; } - println!( - "[FILTER][AUTHORS] matched: {:?}", - filter - .authors - .iter() - .any(|author| author == &event.pubkey.to_string()) + log::debug!( + "[FILTER][authors_match] matched: {:?}", + filter.authors.iter().any(|author| author == &event.pubkey) ); - filter - .authors - .iter() - .any(|author| author == &event.pubkey.to_string()) + filter.authors.iter().any(|author| author == &event.pubkey) } fn delegated_authors_match(filter: &Filter, event: &Event) -> bool { - // Optional implementation - - // let delegated_authors_match = filter.authors.iter().any(|author| { - // event.tags.iter().any(|tag| match tag { - // Tag::Delegation { - // delegator_pk, - // conditions, - // sig, - // } => filter - // .authors - // .iter() - // .any(|author| author == &delegator_pk.to_string()), - // _ => false, - // }) - // }); - println!( - "[FILTER][DELEGATED_AUTHORS] matched: {:?}", - event.tags.iter().any(|tag| match tag { - Tag::Delegation { - delegator_pk, - conditions, - sig, - } => filter - .authors - .iter() - .any(|author| author == &delegator_pk.to_string()), - _ => false, + log::debug!( + "[FILTER][delegated_authors_match] matched: {:?}", + event.tags.iter().any(|tag| { + if tag.kind() == nostr::TagKind::Delegation { + let tag = tag.as_vec(); + if let Ok(event_pubkey) = nostr::Keys::from_pk_str(&tag[1]) { + let pk = 
event_pubkey.public_key(); + return filter.authors.iter().any(|author| author == &pk); + } + return false; + } + false }) ); - event.tags.iter().any(|tag| match tag { - Tag::Delegation { - delegator_pk, - conditions, - sig, - } => filter - .authors - .iter() - .any(|author| author == &delegator_pk.to_string()), - _ => true, + event.tags.iter().any(|tag| { + if tag.kind() == nostr::TagKind::Delegation { + let tag = tag.as_vec(); + if let Ok(event_pubkey) = nostr::Keys::from_pk_str(&tag[1]) { + let pk = event_pubkey.public_key(); + return filter.authors.iter().any(|author| author == &pk); + } + return false; + } + false }) } fn tag_match(filter: &Filter, event: &Event) -> bool { - println!( - "[FILTER][TAG] matched: {:?}", - filter.generic_tags.iter().any(|(key, value)| { - event.tags.iter().any(|tag| { - let kv = tag.as_vec(); - key.to_string() == kv[0] && value.iter().any(|vv| vv == &kv[1]) + if filter.generic_tags.is_empty() && event.tags.is_empty() { + return true; + } + log::debug!( + "[FILTER][tag_match] matched: {:?}", + filter + .generic_tags + .iter() + .any(|(filter_tag_key, filter_tag_value)| { + event.tags.iter().any(|event_tag| { + let event_tag = event_tag.as_vec(); + let event_tag_key = event_tag[0].clone(); + let event_tag_value = event_tag[1].clone(); + + if filter_tag_key.to_string() == event_tag_key { + return filter_tag_value + .iter() + .any(|f_tag_val| f_tag_val.to_string() == event_tag_value); + }; + + false + }) + }) + ); + + filter + .generic_tags + .iter() + .any(|(filter_tag_key, filter_tag_value)| { + event.tags.iter().any(|event_tag| { + let event_tag = event_tag.as_vec(); + let event_tag_key = event_tag[0].clone(); + let event_tag_value = event_tag[1].clone(); + + if filter_tag_key.to_string() == event_tag_key { + return filter_tag_value + .iter() + .any(|f_tag_val| f_tag_val.to_string() == event_tag_value); + }; + + false }) }) - ); - - filter.generic_tags.iter().any(|(key, value)| { - event.tags.iter().any(|tag| { - let kv = 
tag.as_vec(); - key.to_string() == kv[0] && value.iter().any(|vv| vv == &kv[1]) - }) - }); - - true // TODO: Fix delegated authors check } pub fn interested_in_event(filter: &Filter, event: &Event) -> bool { ids_match(filter, event) && filter.since.map_or( { - println!("[FILTER][SINCE][default] matched: {:?}", true); + log::info!("[FILTER][SINCE][default] matched: {:?}", true); true }, |t| { - println!("[FILTER][SINCE] matched: {:?}", event.created_at >= t); + log::info!("[FILTER][SINCE] matched: {:?}", event.created_at >= t); event.created_at >= t }, ) && filter.until.map_or( { - println!("[FILTER][UNTIL][default] matched: {:?}", true); + log::info!("[FILTER][UNTIL][default] matched: {:?}", true); true }, |t| { - println!("[FILTER][UNTIL] matched: {:?}", event.created_at <= t); + log::info!("[FILTER][UNTIL] matched: {:?}", event.created_at <= t); event.created_at <= t }, ) @@ -154,3 +174,27 @@ pub fn interested_in_event(filter: &Filter, event: &Event) -> bool { || delegated_authors_match(filter, event)) && tag_match(filter, event) } + +#[cfg(test)] +mod tests { + use crate::utils::nostr_filter_helpers::interested_in_event; + + #[test] + fn check_simple_match() { + let my_keys = nostr::Keys::generate(); + let event = nostr::EventBuilder::text_note("hello", []) + .to_event(&my_keys) + .unwrap(); + + let k = nostr::Kind::TextNote; + let filter = nostr::Filter::new() + .kinds(vec![k]) + .authors(vec![event.pubkey]); + + let res = interested_in_event(&filter, &event); + + dbg!(&res); + + assert!(res); + } +} diff --git a/src/utils/structs.rs b/src/utils/structs.rs index 790292d..e2b91a2 100644 --- a/src/utils/structs.rs +++ b/src/utils/structs.rs @@ -1,5 +1,5 @@ use super::{config::Config, error::Error}; -// use super::nostr_filter_helpers; +use crate::utils::nostr_filter_helpers; use crate::PubSub; use nostr::{Event, Filter, SubscriptionId}; @@ -34,11 +34,12 @@ impl Subscription { pub fn interested_in_event(&self, event: &Event) -> bool { log::info!("[Subscription] 
Checking if client is interested in the new event"); for filter in &self.filters { - if filter.match_event(event) { + if nostr_filter_helpers::interested_in_event(filter, event) { log::info!("[Subscription] found filter that matches the event"); return true; } } + false } }