diff --git a/config.json b/config.json
new file mode 100644
index 0000000..d0f4481
--- /dev/null
+++ b/config.json
@@ -0,0 +1,11 @@
+{
+    "db": {
+        "db_path": "server_db"
+
+    },
+    "distributors": {
+        "Lox": "127.0.0.1:8002"
+    },
+    "port": 8003,
+    "require_bridge_token": false
+}
diff --git a/src/bin/server.rs b/src/bin/server.rs
index 94682d9..3dbdbf6 100644
--- a/src/bin/server.rs
+++ b/src/bin/server.rs
@@ -1,10 +1,4 @@
-use troll_patrol::{
-    extra_info::{self, ExtraInfo},
-    //negative_report::SerializableNegativeReport,
-    //positive_report::SerializablePositiveReport,
-    request_handler::handle,
-    *,
-};
+use troll_patrol::{request_handler::handle, *};
 
 use clap::Parser;
 use futures::future;
@@ -16,7 +10,7 @@ use hyper::{
 use serde::Deserialize;
 use sled::Db;
 use std::{
-    collections::HashSet, convert::Infallible, fs::File, io::BufReader, net::SocketAddr,
+    collections::BTreeMap, convert::Infallible, fs::File, io::BufReader, net::SocketAddr,
     path::PathBuf, time::Duration,
 };
 use tokio::{
@@ -43,6 +37,8 @@ struct Args {
 #[derive(Debug, Deserialize)]
 pub struct Config {
     pub db: DbConfig,
+    // map of distributor name to IP:port to contact it
+    pub distributors: BTreeMap<BridgeDistributor, String>,
     //require_bridge_token: bool,
     port: u16,
 }
@@ -61,36 +57,10 @@ impl Default for DbConfig {
     }
 }
 
-async fn update_extra_infos(db: &Db) {
-    // Track which files have been processed. This is slightly redundant
-    // because we're only downloading files we don't already have, but it
-    // might be a good idea to check in case we downloaded a file but didn't
-    // process it for some reason.
-    let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
-        Some(v) => bincode::deserialize(&v).unwrap(),
-        None => HashSet::<String>::new(),
-    };
-
-    let new_files = extra_info::download_extra_infos().await.unwrap();
-
-    let mut new_extra_infos = HashSet::<ExtraInfo>::new();
-
-    // Make set of new extra-infos
-    for extra_info_file in &new_files {
-        extra_info::add_extra_infos(&extra_info_file, &mut new_extra_infos);
-        processed_extra_infos_files.insert(extra_info_file.to_string());
-    }
-
-    // Add new extra-infos data to database
-    for extra_info in new_extra_infos {
-        add_extra_info_to_db(&db, extra_info);
-    }
-
-    db.insert(
-        b"extra_infos_files",
-        bincode::serialize(&processed_extra_infos_files).unwrap(),
-    )
-    .unwrap();
+async fn update_daily_info(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
+    update_extra_infos(&db).await;
+    update_negative_reports(&db, &distributors).await;
+    update_positive_reports(&db, &distributors).await;
 }
 
 async fn create_context_manager(
diff --git a/src/lib.rs b/src/lib.rs
index a656ebd..4e5fef9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,4 @@
+use hyper::{Body, Client, Method, Request};
 use lazy_static::lazy_static;
 use serde::{Deserialize, Serialize};
 use sled::Db;
@@ -31,7 +32,7 @@ pub fn get_date() -> u32 {
         .unwrap()
 }
 
-#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
 pub enum BridgeDistributor {
     Lox,
 }
@@ -48,8 +49,8 @@ pub struct BridgeInfo {
     /// first Julian date we started collecting data on this bridge
     pub first_seen: u32,
 
-    /// flag indicating whether the bridge is believed to be blocked
-    pub is_blocked: bool,
+    /// list of countries where the bridge is believed to be blocked
+    pub blocked_in: Vec<String>,
 
     /// map of dates to data for that day
     pub info_by_day: HashMap<u32, DailyBridgeInfo>,
@@ -61,7 +62,7 @@ impl BridgeInfo {
             fingerprint: fingerprint,
             nickname: nickname.to_string(),
             first_seen: get_date(),
-            is_blocked: false,
+            blocked_in: Vec::<String>::new(),
             info_by_day: HashMap::<u32, DailyBridgeInfo>::new(),
         }
     }
@@ -75,7 +76,10 @@ impl fmt::Display for BridgeInfo {
         );
         str.push_str(format!("nickname: {}\n", self.nickname).as_str());
         str.push_str(format!("first_seen: {}\n", self.first_seen).as_str());
-        str.push_str(format!("is_blocked: {}\n", self.is_blocked).as_str());
+        str.push_str("blocked_in:");
+        for country in &self.blocked_in {
+            str.push_str(format!("\n {}", country).as_str());
+        }
         str.push_str("info_by_day:");
         for day in self.info_by_day.keys() {
             str.push_str(format!("\n day: {}", day).as_str());
@@ -180,6 +184,8 @@ impl fmt::Display for DailyBridgeInfo {
     }
 }
 
+// Process extra-infos
+
 /// Adds the extra-info data for a single bridge to the database. If the
 /// database already contains an extra-info for this bridge for this date,
 /// but this extra-info contains different data for some reason, use the
@@ -210,6 +216,41 @@ pub fn add_extra_info_to_db(db: &Db, extra_info: ExtraInfo) {
         .unwrap();
 }
 
+/// Download new extra-infos files and add their data to the database
+pub async fn update_extra_infos(db: &Db) {
+    // Track which files have been processed. This is slightly redundant
+    // because we're only downloading files we don't already have, but it
+    // might be a good idea to check in case we downloaded a file but didn't
+    // process it for some reason.
+    let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
+        Some(v) => bincode::deserialize(&v).unwrap(),
+        None => HashSet::<String>::new(),
+    };
+
+    let new_files = extra_info::download_extra_infos().await.unwrap();
+
+    let mut new_extra_infos = HashSet::<ExtraInfo>::new();
+
+    // Make set of new extra-infos
+    for extra_info_file in &new_files {
+        extra_info::add_extra_infos(&extra_info_file, &mut new_extra_infos);
+        processed_extra_infos_files.insert(extra_info_file.to_string());
+    }
+
+    // Add new extra-infos data to database
+    for extra_info in new_extra_infos {
+        add_extra_info_to_db(&db, extra_info);
+    }
+
+    db.insert(
+        b"extra_infos_files",
+        bincode::serialize(&processed_extra_infos_files).unwrap(),
+    )
+    .unwrap();
+}
+
+// Process negative reports
+
 /// Negative reports can be deduplicated, so we store to-be-processed
 /// negative reports as a map of [report] to [count of report]. Add this
 /// NR to that map (or create a new map if necessary).
@@ -244,6 +285,94 @@ pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
         .unwrap();
 }
 
+/// Sends a collection of negative reports to the Lox Authority and returns the
+/// number of valid reports returned by the server. The negative reports in the
+/// collection should all have the same bridge fingerprint, date, country, and
+/// distributor.
+pub async fn verify_negative_reports(
+    distributors: &BTreeMap<BridgeDistributor, String>,
+    reports: &BTreeMap<String, u32>,
+) -> u32 {
+    // Don't make a network call if we don't have any reports anyway
+    if reports.is_empty() {
+        return 0;
+    }
+    // Get one report, assume the rest have the same distributor
+    let first_report: SerializableNegativeReport =
+        serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
+    let distributor = first_report.distributor;
+    let client = Client::new();
+    let uri: String = (distributors.get(&distributor).unwrap().to_owned() + "/verifynegative")
+        .parse()
+        .unwrap();
+    let req = Request::builder()
+        .method(Method::POST)
+        .uri(uri)
+        .body(Body::from(serde_json::to_string(&reports).unwrap()))
+        .unwrap();
+    let resp = client.request(req).await.unwrap();
+    let buf = hyper::body::to_bytes(resp).await.unwrap();
+    serde_json::from_slice(&buf).unwrap()
+}
+
+/// Process today's negative reports and store the count of verified reports in
+/// the database.
+pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
+    let mut all_negative_reports = match db.get("nrs-to-process").unwrap() {
+        Some(v) => bincode::deserialize(&v).unwrap(),
+        None => BTreeMap::<String, BTreeMap<String, u32>>::new(),
+    };
+    for bridge_date in all_negative_reports.keys() {
+        // We could parse the fingerprint and date:
+        //let fingerprint: [u8; 20] = array_bytes::hex2array(&bridge_date[0..40]).unwrap();
+        //let date: u32 = &bridge_date[41..].parse().unwrap();
+        // but instead, let's just get it from the first report
+        let reports = all_negative_reports.get(bridge_date).unwrap();
+        if !reports.is_empty() {
+            let first_report: SerializableNegativeReport =
+                serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
+            let fingerprint = first_report.fingerprint;
+            let date = first_report.date;
+            let country = first_report.country;
+            let count_valid = verify_negative_reports(&distributors, reports).await;
+            let mut count_per_country = BTreeMap::<String, u32>::new();
+            count_per_country.insert(country, count_valid);
+            let mut bridge_info = match db.get(&fingerprint).unwrap() {
+                Some(v) => bincode::deserialize(&v).unwrap(),
+                // It should already exist, unless the bridge hasn't published
+                // any bridge stats.
+                None => BridgeInfo::new(fingerprint, &"".to_string()),
+            };
+            // Add the new report count to it
+            if bridge_info.info_by_day.contains_key(&date) {
+                let daily_bridge_info = bridge_info.info_by_day.get_mut(&date).unwrap();
+                daily_bridge_info.add_info(BridgeInfoType::NegativeReports, &count_per_country);
+                // Commit changes to database
+                db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
+                    .unwrap();
+            } else {
+                // No existing entry; make a new one.
+                let mut daily_bridge_info = DailyBridgeInfo::new();
+                daily_bridge_info.add_info(BridgeInfoType::NegativeReports, &count_per_country);
+                bridge_info.info_by_day.insert(date, daily_bridge_info);
+                // Commit changes to database
+                db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
+                    .unwrap();
+            }
+        }
+    }
+    // TODO: Would it be cheaper to just recreate it?
+    all_negative_reports.clear();
+    // Remove the now-processed reports from the database
+    db.insert(
+        "nrs-to-process",
+        bincode::serialize(&all_negative_reports).unwrap(),
+    )
+    .unwrap();
+}
+
+// Process positive reports
+
 /// We store to-be-processed positive reports as a vector. Add this PR
 /// to that vector (or create a new vector if necessary).
 pub fn save_positive_report_to_process(db: &Db, pr: PositiveReport) {
@@ -271,3 +400,89 @@ pub fn save_positive_report_to_process(db: &Db, pr: PositiveReport) {
     db.insert("prs-to-process", bincode::serialize(&reports).unwrap())
         .unwrap();
 }
+
+/// Sends a collection of positive reports to the Lox Authority and returns the
+/// number of valid reports returned by the server. The positive reports in the
+/// collection should all have the same bridge fingerprint, date, and country.
+pub async fn verify_positive_reports(
+    distributors: &BTreeMap<BridgeDistributor, String>,
+    reports: &Vec<SerializablePositiveReport>,
+) -> u32 {
+    // Don't make a network call if we don't have any reports anyway
+    if reports.is_empty() {
+        return 0;
+    }
+    let client = Client::new();
+    let uri: String = (distributors
+        .get(&BridgeDistributor::Lox)
+        .unwrap()
+        .to_owned()
+        + "/verifypositive")
+        .parse()
+        .unwrap();
+    let req = Request::builder()
+        .method(Method::POST)
+        .uri(uri)
+        .body(Body::from(serde_json::to_string(&reports).unwrap()))
+        .unwrap();
+    let resp = client.request(req).await.unwrap();
+    let buf = hyper::body::to_bytes(resp).await.unwrap();
+    serde_json::from_slice(&buf).unwrap()
+}
+
+/// Process today's positive reports and store the count of verified reports in
+/// the database.
+pub async fn update_positive_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
+    let mut all_positive_reports = match db.get("prs-to-process").unwrap() {
+        Some(v) => bincode::deserialize(&v).unwrap(),
+        None => BTreeMap::<String, Vec<SerializablePositiveReport>>::new(),
+    };
+    for bridge_date in all_positive_reports.keys() {
+        // We could parse the fingerprint and date:
+        //let fingerprint: [u8; 20] = array_bytes::hex2array(&bridge_date[0..40]).unwrap();
+        //let date: u32 = &bridge_date[41..].parse().unwrap();
+        // but instead, let's just get it from the first report
+        let reports = all_positive_reports.get(bridge_date).unwrap();
+        if !reports.is_empty() {
+            let first_report = &reports[0];
+            let fingerprint = first_report.fingerprint;
+            let date = first_report.date;
+            let country = first_report.country.clone();
+            let count_valid = verify_positive_reports(&distributors, reports).await;
+            let mut count_per_country = BTreeMap::<String, u32>::new();
+            count_per_country.insert(country, count_valid);
+            let mut bridge_info = match db.get(&fingerprint).unwrap() {
+                Some(v) => bincode::deserialize(&v).unwrap(),
+                // It should already exist, unless the bridge hasn't published
+                // any bridge stats.
+                None => BridgeInfo::new(fingerprint, &"".to_string()),
+            };
+            // Add the new report count to it
+            if bridge_info.info_by_day.contains_key(&date) {
+                let daily_bridge_info = bridge_info.info_by_day.get_mut(&date).unwrap();
+                daily_bridge_info.add_info(BridgeInfoType::PositiveReports, &count_per_country);
+                // Commit changes to database
+                db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
+                    .unwrap();
+            } else {
+                // No existing entry; make a new one.
+                let mut daily_bridge_info = DailyBridgeInfo::new();
+                daily_bridge_info.add_info(BridgeInfoType::PositiveReports, &count_per_country);
+                bridge_info.info_by_day.insert(date, daily_bridge_info);
+                // Commit changes to database
+                db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
+                    .unwrap();
+            }
+        }
+    }
+    // TODO: Would it be cheaper to just recreate it?
+    all_positive_reports.clear();
+    // Remove the now-processed reports from the database
+    db.insert(
+        "prs-to-process",
+        bincode::serialize(&all_positive_reports).unwrap(),
+    )
+    .unwrap();
+}
+
+// TODO: function to mark a bridge as blocked
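---
Note (not part of the patch): one possible shape for the final TODO above, written against the new `blocked_in` field. This is a sketch only; the name `mark_blocked` and its signature are hypothetical, but it reuses the same sled layout as `update_negative_reports`/`update_positive_reports` (each bridge's bincode-serialized BridgeInfo keyed by its 20-byte fingerprint).

/// Hypothetical sketch: record that a bridge is believed to be blocked
/// in `country` by appending the country to its `blocked_in` list.
pub fn mark_blocked(db: &Db, fingerprint: [u8; 20], country: &str) {
    // Load the existing BridgeInfo, or start a fresh one if this bridge
    // has never published any stats.
    let mut bridge_info: BridgeInfo = match db.get(&fingerprint).unwrap() {
        Some(v) => bincode::deserialize(&v).unwrap(),
        None => BridgeInfo::new(fingerprint, &"".to_string()),
    };
    // Record each country at most once.
    if !bridge_info.blocked_in.contains(&country.to_string()) {
        bridge_info.blocked_in.push(country.to_string());
        db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
            .unwrap();
    }
}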