Two new config variables for allowing short scrapes

This commit is contained in:
Mike Dilger 2024-03-05 08:27:42 +13:00
parent 3ee285b7b7
commit 437782e770
7 changed files with 64 additions and 8 deletions

View File

@ -37,7 +37,7 @@ fn main() -> Result<(), Error> {
config.allow_scraping = true;
// Setup store
let store = Store::new(&config.data_directory, config.allow_scraping)?;
let store = Store::new(&config)?;
let mut buffer: [u8; 128] = [0; 128];
let (_incount, _outcount, filter) = Filter::from_json(b"{}", &mut buffer)?;

View File

@ -63,7 +63,7 @@ async fn main() -> Result<(), Error> {
log::debug!(target: "Server", "Loaded config file.");
// Setup store
let store = Store::new(&config.data_directory, config.allow_scraping)?;
let store = Store::new(&config)?;
let _ = GLOBALS.store.set(store);
// TLS setup

View File

@ -20,6 +20,8 @@ pub struct FriendlyConfig {
pub user_hex_keys: Vec<String>,
pub verify_events: bool,
pub allow_scraping: bool,
pub allow_scrape_if_limited_to: u32,
pub allow_scrape_if_max_seconds: u64,
pub max_subscriptions: usize,
pub serve_ephemeral: bool,
pub serve_relay_lists: bool,
@ -45,6 +47,8 @@ impl Default for FriendlyConfig {
user_hex_keys: vec![],
verify_events: true,
allow_scraping: false,
allow_scrape_if_limited_to: 100,
allow_scrape_if_max_seconds: 3600,
max_subscriptions: 32,
serve_ephemeral: true,
serve_relay_lists: true,
@ -72,6 +76,8 @@ impl FriendlyConfig {
user_hex_keys,
verify_events,
allow_scraping,
allow_scrape_if_limited_to,
allow_scrape_if_max_seconds,
max_subscriptions,
serve_ephemeral,
serve_relay_lists,
@ -115,6 +121,8 @@ impl FriendlyConfig {
user_hex_keys,
verify_events,
allow_scraping,
allow_scrape_if_limited_to,
allow_scrape_if_max_seconds,
max_subscriptions,
serve_ephemeral,
serve_relay_lists,
@ -142,6 +150,8 @@ pub struct Config {
pub user_hex_keys: Vec<String>,
pub verify_events: bool,
pub allow_scraping: bool,
pub allow_scrape_if_limited_to: u32,
pub allow_scrape_if_max_seconds: u64,
pub max_subscriptions: usize,
pub serve_ephemeral: bool,
pub serve_relay_lists: bool,

View File

@ -3,6 +3,7 @@ pub use event_store::EventStore;
mod migrations;
use crate::config::Config;
use crate::error::{ChorusError, Error};
use crate::ip::IpData;
use crate::types::{Event, Filter, Id, Kind, Pubkey, Time};
@ -29,11 +30,13 @@ pub struct Store {
deleted_events: Database<UnalignedSlice<u8>, Unit>,
ip_data: Database<UnalignedSlice<u8>, UnalignedSlice<u8>>,
allow_scraping: bool,
allow_scrape_if_limited_to: u32,
allow_scrape_if_max_seconds: u64,
}
impl Store {
/// Setup persistent storage
pub fn new(data_directory: &str, allow_scraping: bool) -> Result<Store, Error> {
pub fn new(config: &Config) -> Result<Store, Error> {
let mut builder = EnvOpenOptions::new();
unsafe {
builder.flags(EnvFlags::NO_TLS);
@ -41,7 +44,7 @@ impl Store {
builder.max_dbs(32);
builder.map_size(1048576 * 1024 * 24); // 24 GB
let dir = format!("{}/lmdb", data_directory);
let dir = format!("{}/lmdb", &config.data_directory);
fs::create_dir_all(&dir)?;
let env = match builder.open(&dir) {
@ -111,7 +114,7 @@ impl Store {
txn.commit()?;
let event_map_file = format!("{}/event.map", data_directory);
let event_map_file = format!("{}/event.map", &config.data_directory);
let events = EventStore::new(event_map_file)?;
let store = Store {
@ -126,7 +129,9 @@ impl Store {
deleted_offsets,
deleted_events,
ip_data,
allow_scraping,
allow_scraping: config.allow_scraping,
allow_scrape_if_limited_to: config.allow_scrape_if_limited_to,
allow_scrape_if_max_seconds: config.allow_scrape_if_max_seconds,
};
// This is in migrations.rs
@ -465,8 +470,9 @@ impl Store {
} else {
// SCRAPE:
let maxtime = filter.until().0.min(Time::now().0);
let allow =
self.allow_scraping || filter.limit() <= 100 || (maxtime - filter.since().0) < 3600;
let allow = self.allow_scraping ||
filter.limit() <= self.allow_scrape_if_limited_to ||
(maxtime - filter.since().0) < self.allow_scrape_if_max_seconds;
if !allow {
return Err(ChorusError::Scraper.into());
}

View File

@ -136,6 +136,28 @@ verify_events = true
allow_scraping = false
# This is a u32 count of events indicating a filter `limit` value at or below which a scrape
# is allowed, irrespective of the `allow_scraping` setting. Such scrapes are not expensive
# due to the limit.
#
# See `allow_scraping` to learn the definition of a scrape.
#
# The default is 100.
#
allow_scrape_if_limited_to = 100
# This is a u64 number of seconds indicating a filter time range under which a scrape is
# allowed, irrespective of the `allow_scraping` setting. Such scrapes are rarely expensive
# due to the short time period.
#
# See `allow_scraping` to learn the definition of a scrape.
#
# The default is 3600.
#
allow_scrape_if_max_seconds = 3600
# This is an integer indicating the maximum number of subscriptions a connection can have open
# at a given time.
#

View File

@ -121,6 +121,22 @@ The purpose of this setting is as a temporary setting that allows you to dump ev
Default is false.
### allow_scrape_if_limited_to
This is a u32 count of events indicating a filter `limit` value at or below which a scrape is allowed, irrespective of the `allow_scraping` setting. Such scrapes are not expensive due to the limit.
See `allow_scraping` to learn the definition of a scrape.
The default is 100.
### allow_scrape_if_max_seconds
This is a u64 number of seconds indicating a filter time range under which a scrape is allowed, irrespective of the `allow_scraping` setting. Such scrapes are rarely expensive due to the short time period.
See `allow_scraping` to learn the definition of a scrape.
The default is 3600.
### max_subscriptions
This is an integer indicating the maximum number of subscriptions a connection can have open at a given time.

View File

@ -14,6 +14,8 @@ user_hex_keys = [
]
verify_events = true
allow_scraping = true
allow_scrape_if_limited_to = 100
allow_scrape_if_max_seconds = 3600
max_subscriptions = 32
serve_ephemeral = true
serve_relay_lists = true