From 3a9f4a7e9853308bc067c092f26ec2aa6ecd80f0 Mon Sep 17 00:00:00 2001
From: Mike Dilger
Date: Tue, 20 Feb 2024 06:41:03 +1300
Subject: [PATCH] Allow filters with limit <= 10 (previously treated as scrapers)

---
 docs/BEHAVIOR.md | 1 +
 docs/CONFIG.md   | 1 +
 src/nostr.rs     | 2 +-
 src/store/mod.rs | 2 +-
 4 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/docs/BEHAVIOR.md b/docs/BEHAVIOR.md
index 85aa077..4ab6a04 100644
--- a/docs/BEHAVIOR.md
+++ b/docs/BEHAVIOR.md
@@ -34,6 +34,7 @@ Filters which are broad are considered scrapers and are not serviced. Filters mu
 
 - A non-empty `authors` list is set and a non-empty `kinds` list is set
 - A non-empty `authors` list is set and at least one tag is set.
 - A non-empty `kinds` list is set and at least one tag is set.
+- Has a limit <= 10.
 
 If you wish to change these rules, change the source code at `nostr.rs:screen_outgoing_event()`
diff --git a/docs/CONFIG.md b/docs/CONFIG.md
index 1b7ae70..f2f23d1 100644
--- a/docs/CONFIG.md
+++ b/docs/CONFIG.md
@@ -111,6 +111,7 @@ This is a boolean indicating whether or not scraping is allowed. Scraping is any
 
 - A non-empty `authors` list is set and a non-empty `kinds` list is set
 - A non-empty `authors` list is set and at least one tag is set.
 - A non-empty `kinds` list is set and at least one tag is set.
+- Has a limit <= 10
 
 Filter that fail to match these conditions will be rejected if `allow_scraping` is false.
diff --git a/src/nostr.rs b/src/nostr.rs
index 2ec2b77..ba8b0c6 100644
--- a/src/nostr.rs
+++ b/src/nostr.rs
@@ -191,7 +191,7 @@ impl WebSocketService {
                         NostrReplyPrefix::Restricted,
                         PERSONAL_MSG.to_owned(),
                     )
-                },
+                }
                 _ => NostrReply::Ok(id, false, NostrReplyPrefix::Error, format!("{}", e)),
             },
         };
diff --git a/src/store/mod.rs b/src/store/mod.rs
index 3576a94..aaca435 100644
--- a/src/store/mod.rs
+++ b/src/store/mod.rs
@@ -354,7 +354,7 @@ impl Store {
                     }
                 }
             }
-        } else if self.allow_scraping {
+        } else if self.allow_scraping || filter.limit() <= 10 {
             // This is INEFFICIENT as it scans through EVERY EVENT
             // but the filter is a scraper and we don't have a lot of support
             // for scrapers.
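
Note on the behavior change: the screening rule described by the two docs hunks reduces to a small predicate. The sketch below is illustrative only, assuming a simplified `Filter` type — the fields, the method names other than `limit()`, and the treatment of a missing limit are stand-ins rather than chorus's actual API; only the `limit() <= 10` comparison is taken verbatim from the patch.

    // Minimal sketch of the post-patch scraper screen (assumed types).
    struct Filter {
        authors: Vec<String>,      // author pubkeys, simplified
        kinds: Vec<u16>,           // event kinds
        tags: Vec<(char, String)>, // (tag letter, value) pairs, simplified
        limit: Option<usize>,      // requested result limit, if any
    }

    impl Filter {
        fn limit(&self) -> usize {
            // Assumption: a filter without an explicit limit is unlimited.
            self.limit.unwrap_or(usize::MAX)
        }

        // True if this filter is specific enough to service, cheap enough
        // (limit <= 10), or the operator allows scraping outright.
        fn serviceable(&self, allow_scraping: bool) -> bool {
            let authors = !self.authors.is_empty();
            let kinds = !self.kinds.is_empty();
            let tagged = !self.tags.is_empty();
            (authors && kinds)          // non-empty authors + non-empty kinds
                || (authors && tagged)  // non-empty authors + at least one tag
                || (kinds && tagged)    // non-empty kinds + at least one tag
                || self.limit() <= 10   // new in this patch
                || allow_scraping
        }
    }

For example, a filter of `{"kinds":[1],"limit":5}` matches none of the three pair conditions, so before this patch it was rejected whenever `allow_scraping` was false; now it is serviced, taking the full-scan path marked INEFFICIENT in the `src/store/mod.rs` hunk above.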