Option to allow scraping

This commit is contained in:
Mike Dilger 2024-02-17 10:26:31 +13:00
parent 60da4fabb9
commit d6d8d3ffcf
4 changed files with 27 additions and 2 deletions

View File

@@ -12,4 +12,8 @@ FriendlyConfig(
"ee11a5dff40c19a555f41fe42b48f00e618c91225622ae37b6c2bb67b76c4e49"
],
verify_events: true,
// This is a bad idea in production, but useful for testing or for dumping
// your entire relay
allow_scraping: true,
)

View File

@@ -15,6 +15,7 @@ pub struct FriendlyConfig {
pub public_key_hex: Option<String>,
pub user_hex_keys: Vec<String>,
pub verify_events: bool,
pub allow_scraping: bool,
}
impl Default for FriendlyConfig {
@@ -31,6 +32,7 @@ impl Default for FriendlyConfig {
public_key_hex: None,
user_hex_keys: vec![],
verify_events: true,
allow_scraping: false,
}
}
}
@@ -49,6 +51,7 @@ impl FriendlyConfig {
public_key_hex,
user_hex_keys,
verify_events,
allow_scraping,
} = self;
let mut public_key: Option<Pubkey> = None;
@@ -74,6 +77,7 @@ impl FriendlyConfig {
user_keys,
user_hex_keys,
verify_events,
allow_scraping,
})
}
}
@@ -92,4 +96,5 @@ pub struct Config {
pub user_keys: Vec<Pubkey>,
pub user_hex_keys: Vec<String>,
pub verify_events: bool,
pub allow_scraping: bool,
}

View File

@@ -55,7 +55,7 @@ async fn main() -> Result<(), Error> {
log::debug!("Loaded config file.");
// Setup store
let store = Store::new(&config.data_directory)?;
let store = Store::new(&config.data_directory, config.allow_scraping)?;
let _ = GLOBALS.store.set(store);
// TLS setup

View File

@@ -18,11 +18,12 @@ pub struct Store {
atci: Database<UnalignedSlice<u8>, OwnedType<usize>>,
ktci: Database<UnalignedSlice<u8>, OwnedType<usize>>,
deleted: Database<U64<BigEndian>, Unit>,
allow_scraping: bool,
}
impl Store {
/// Setup persistent storage
pub fn new(data_directory: &str) -> Result<Store, Error> {
pub fn new(data_directory: &str, allow_scraping: bool) -> Result<Store, Error> {
let mut builder = EnvOpenOptions::new();
unsafe {
builder.flags(EnvFlags::NO_TLS);
@@ -82,6 +83,7 @@ impl Store {
atci,
ktci,
deleted,
allow_scraping,
})
}
@@ -291,6 +293,20 @@ impl Store {
}
}
}
} else if self.allow_scraping {
// This is INEFFICIENT as it scans through EVERY EVENT
// but the filter is a scraper and we don't have a lot of support
// for scrapers.
let txn = self.env.read_txn()?;
let iter = self.ids.iter(&txn)?;
for result in iter {
let (_key, offset) = result?;
if let Some(event) = self.events.get_event_by_offset(offset)? {
if filter.event_matches(&event)? {
output.push(event);
}
}
}
} else {
return Err(ChorusError::Scraper.into());
}