Compare commits: e889cba878 ... a504e8aef2

7 commits:
a504e8aef2
12519a344b
10ff088b5a
ec4dc5ca29
31b27a291a
b2eb244757
273aaab38d
src/analysis.rs (126 lines changed)
@@ -1,14 +1,12 @@
use crate::{BridgeInfo, BridgeInfoType};
use lox_library::proto::{level_up::LEVEL_INTERVAL, trust_promotion::UNTRUSTED_INTERVAL};
use statrs::distribution::{Continuous, MultivariateNormal};
use statrs::distribution::{Continuous, MultivariateNormal, Normal};
use std::{
    cmp::min,
    collections::{BTreeMap, HashSet},
};

const SCALE_BRIDGE_IPS: u32 = 8;

/// Provides a function for predicting which countries block this bridge
pub trait Analyzer {
    /// Evaluate open-entry bridge. Returns true if blocked, false otherwise.
@@ -54,7 +52,6 @@ pub fn blocked_in(
    confidence: f64,
    date: u32,
) -> HashSet<String> {
    // TODO: Re-evaluate past days if we have backdated reports
    let mut blocked_in = HashSet::<String>::new();
    let today = date;
    let age = today - bridge_info.first_seen;
@@ -71,7 +68,7 @@ pub fn blocked_in(
        None => &new_map_binding,
    };
    let bridge_ips_today = match today_info.get(&BridgeInfoType::BridgeIps) {
        Some(&v) => v / SCALE_BRIDGE_IPS,
        Some(&v) => v,
        None => 0,
    };
    let negative_reports_today = match today_info.get(&BridgeInfoType::NegativeReports) {
@@ -98,7 +95,7 @@ pub fn blocked_in(
            None => &new_map_binding,
        };
        bridge_ips[i as usize] = match day_info.get(&BridgeInfoType::BridgeIps) {
            Some(&v) => v / SCALE_BRIDGE_IPS,
            Some(&v) => v,
            None => 0,
        };
        negative_reports[i as usize] = match day_info.get(&BridgeInfoType::NegativeReports)
@@ -264,7 +261,7 @@ impl NormalAnalyzer {
                sum +=
                    (var1[index] as f64 - var1_mean) * (var2[index] as f64 - var2_mean);
            }
            sum / var1.len() as f64
            sum / (var1.len() - 1) as f64
        });
    }
}
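The change above switches the covariance denominator from n to n - 1 (Bessel's correction), i.e. from the population covariance to the sample covariance. A minimal standalone sketch of that estimator; the sample_cov helper below is illustrative and is not the crate's NormalAnalyzer::stats:

/// Sample covariance with Bessel's correction (divide by n - 1).
fn sample_cov(xs: &[f64], ys: &[f64]) -> f64 {
    assert_eq!(xs.len(), ys.len());
    assert!(xs.len() > 1);
    let n = xs.len() as f64;
    let mean_x: f64 = xs.iter().sum::<f64>() / n;
    let mean_y: f64 = ys.iter().sum::<f64>() / n;
    let sum: f64 = xs
        .iter()
        .zip(ys)
        .map(|(x, y)| (x - mean_x) * (y - mean_y))
        .sum();
    sum / (n - 1.0)
}

fn main() {
    // Covariance of a series with itself equals its sample variance.
    let a = [1.0, 2.0, 3.0, 4.0];
    assert!((sample_cov(&a, &a) - 5.0 / 3.0).abs() < 1e-12);
    println!("cov = {}", sample_cov(&a, &a));
}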
@@ -286,8 +283,7 @@ impl Analyzer for NormalAnalyzer {
        negative_reports_today: u32,
    ) -> bool {
        negative_reports_today > self.max_threshold
            || f64::from(negative_reports_today)
                > self.scaling_factor * f64::from(bridge_ips_today) * SCALE_BRIDGE_IPS as f64
            || f64::from(negative_reports_today) > self.scaling_factor * f64::from(bridge_ips_today)
    }

    /// Evaluate invite-only bridge based on last 30 days
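With the SCALE_BRIDGE_IPS factor dropped, the open-entry check above flags a bridge when today's negative reports exceed either a hard cap or a fixed fraction of the raw (multiple-of-8) connection count. A hedged sketch of that rule, using an illustrative struct rather than the crate's NormalAnalyzer (the field names mirror the diff; the values in main echo the test's NormalAnalyzer::new(5, 0.25)):

struct OpenEntryCheck {
    max_threshold: u32,
    scaling_factor: f64,
}

impl OpenEntryCheck {
    // bridge_ips_today is the raw per-country count, no longer divided by 8.
    fn looks_blocked(&self, bridge_ips_today: u32, negative_reports_today: u32) -> bool {
        negative_reports_today > self.max_threshold
            || f64::from(negative_reports_today)
                > self.scaling_factor * f64::from(bridge_ips_today)
    }
}

fn main() {
    let check = OpenEntryCheck { max_threshold: 5, scaling_factor: 0.25 };
    assert!(!check.looks_blocked(24, 2)); // 2 <= 0.25 * 24
    assert!(check.looks_blocked(24, 7)); // above the hard cap
    println!("ok");
}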
@@ -305,22 +301,52 @@ impl Analyzer for NormalAnalyzer {
        let alpha = 1.0 - confidence;

        let (mean_vec, sd_vec, cov_mat) = Self::stats(&[bridge_ips, negative_reports]);
        let bridge_ips_mean = mean_vec[0];
        let negative_reports_mean = mean_vec[1];
        let bridge_ips_sd = sd_vec[0];
        let negative_reports_sd = sd_vec[1];

        // Artificially create data for alternative hypothesis
        let num_days = bridge_ips.len() as usize;
        let mut bridge_ips_blocked = vec![0; num_days];
        let mut negative_reports_blocked = vec![0; num_days];
        let bridge_ips_deviation = (2.0 * bridge_ips_sd).round() as u32;
        for i in 0..num_days {
            // Suppose bridge stats will go down by 2 SDs
            bridge_ips_blocked[i] = if bridge_ips_deviation > bridge_ips[i] {
                0
            } else {
                bridge_ips[i] - bridge_ips_deviation
            };
            // Suppose negative reports will go up by 2 SDs
            negative_reports_blocked[i] =
                negative_reports[i] + (2.0 * negative_reports_sd).round() as u32;
        }
        let (mean_vec_blocked, _sd_vec_blocked, cov_mat_blocked) =
            Self::stats(&[&bridge_ips_blocked, &negative_reports_blocked]);

        let mvn = MultivariateNormal::new(mean_vec, cov_mat).unwrap();
        if mvn.pdf(&DVector::from_vec(vec![
        let pdf = mvn.pdf(&DVector::from_vec(vec![
            bridge_ips_today as f64,
            negative_reports_today as f64,
        ])) < alpha
        {
            (negative_reports_today as f64) > negative_reports_mean + negative_reports_sd
                || (bridge_ips_today as f64) < bridge_ips_mean - bridge_ips_sd
        } else {
            false
        }
        ]));

        let mvn_blocked = MultivariateNormal::new(mean_vec_blocked, cov_mat_blocked).unwrap();
        let pdf_blocked = mvn_blocked.pdf(&DVector::from_vec(vec![
            bridge_ips_today as f64,
            negative_reports_today as f64,
        ]));

        // Also model negative reports in isolation
        let nr_normal = Normal::new(negative_reports_mean, negative_reports_sd).unwrap();
        let nr_pdf = nr_normal.pdf(negative_reports_today as f64);
        let nr_normal_blocked = Normal::new(
            negative_reports_mean + 2.0 * negative_reports_sd,
            negative_reports_sd,
        )
        .unwrap();
        let nr_pdf_blocked = nr_normal_blocked.pdf(negative_reports_today as f64);

        (pdf / pdf_blocked).ln() < alpha || (nr_pdf / nr_pdf_blocked).ln() < alpha
    }

    /// Evaluate invite-only bridge with lv3+ users submitting positive reports
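The rewritten body above replaces the simple low-density check with a likelihood-ratio style decision: today's observation is scored both under a model fitted to the bridge's recent history and under a synthetic "blocked" alternative, and the log of the ratio is compared against alpha (jointly via the multivariate normal, and separately for negative reports alone). A reduced, univariate sketch of that decision using the same statrs distributions the diff imports; the function and the values in main are made up for illustration:

// "Blocked" is modeled as the historical normal shifted up by two standard
// deviations; we flag a blockage when the observation is sufficiently more
// likely under that alternative, thresholding the log-ratio against alpha
// as the diff does.
use statrs::distribution::{Continuous, Normal};

fn looks_blocked(nr_today: f64, nr_mean: f64, nr_sd: f64, alpha: f64) -> bool {
    let null = Normal::new(nr_mean, nr_sd).unwrap();
    let blocked = Normal::new(nr_mean + 2.0 * nr_sd, nr_sd).unwrap();
    let ratio = null.pdf(nr_today) / blocked.pdf(nr_today);
    ratio.ln() < alpha
}

fn main() {
    let alpha = 0.05; // 1.0 - confidence
    assert!(!looks_blocked(2.0, 1.5, 1.0, alpha)); // near the historical mean
    assert!(looks_blocked(5.0, 1.5, 1.0, alpha)); // well above the mean
    println!("ok");
}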
@@ -342,25 +368,67 @@ impl Analyzer for NormalAnalyzer {

        let (mean_vec, sd_vec, cov_mat) =
            Self::stats(&[bridge_ips, negative_reports, positive_reports]);
        let bridge_ips_mean = mean_vec[0];
        let negative_reports_mean = mean_vec[1];
        let positive_reports_mean = mean_vec[2];
        let bridge_ips_sd = sd_vec[0];
        let negative_reports_sd = sd_vec[1];
        let positive_reports_sd = sd_vec[2];

        // Artificially create data for alternative hypothesis
        let num_days = bridge_ips.len() as usize;
        let mut bridge_ips_blocked = vec![0; num_days];
        let mut negative_reports_blocked = vec![0; num_days];
        let mut positive_reports_blocked = vec![0; num_days];
        let bridge_ips_deviation = (2.0 * bridge_ips_sd).round() as u32;
        let positive_reports_deviation = (2.0 * positive_reports_sd).round() as u32;
        for i in 0..num_days {
            // Suppose positive reports will go down by 2 SDs
            positive_reports_blocked[i] = if positive_reports_deviation > positive_reports[i] {
                0
            } else {
                positive_reports[i] - positive_reports_deviation
            };
            // Suppose bridge stats will go down by 2 SDs
            bridge_ips_blocked[i] = if bridge_ips_deviation > bridge_ips[i] {
                0
            } else {
                bridge_ips[i] - bridge_ips_deviation
            };
            // Suppose each user who would have submitted a positive report but
            // didn't submits a negative report instead.
            negative_reports_blocked[i] =
                negative_reports[i] + positive_reports[i] - positive_reports_blocked[i];
        }
        let (mean_vec_blocked, _sd_vec_blocked, cov_mat_blocked) = Self::stats(&[
            &bridge_ips_blocked,
            &negative_reports_blocked,
            &positive_reports_blocked,
        ]);

        let mvn = MultivariateNormal::new(mean_vec, cov_mat).unwrap();
        if mvn.pdf(&DVector::from_vec(vec![
        let pdf = mvn.pdf(&DVector::from_vec(vec![
            bridge_ips_today as f64,
            negative_reports_today as f64,
            positive_reports_today as f64,
        ])) < alpha
        {
            (negative_reports_today as f64) > negative_reports_mean + negative_reports_sd
                || (bridge_ips_today as f64) < bridge_ips_mean - bridge_ips_sd
                || (positive_reports_today as f64) < positive_reports_mean - positive_reports_sd
        } else {
            false
        }
        ]));

        let mvn_blocked = MultivariateNormal::new(mean_vec_blocked, cov_mat_blocked).unwrap();
        let pdf_blocked = mvn_blocked.pdf(&DVector::from_vec(vec![
            bridge_ips_today as f64,
            negative_reports_today as f64,
            positive_reports_today as f64,
        ]));

        // Also model negative reports in isolation
        let nr_normal = Normal::new(negative_reports_mean, negative_reports_sd).unwrap();
        let nr_pdf = nr_normal.pdf(negative_reports_today as f64);
        // Note we do NOT make this a function of positive signals
        let nr_normal_blocked = Normal::new(
            negative_reports_mean + 2.0 * negative_reports_sd,
            negative_reports_sd,
        )
        .unwrap();
        let nr_pdf_blocked = nr_normal_blocked.pdf(negative_reports_today as f64);

        (pdf / pdf_blocked).ln() < alpha || (nr_pdf / nr_pdf_blocked).ln() < alpha
    }
}
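For the positive-report variant above, the synthetic alternative assumes positive reports drop by two standard deviations (saturating at zero) and that every user who would have filed a positive report files a negative one instead. A small sketch of that per-day construction; the standalone helper below is not taken from the crate, which does this inline over the 30-day window:

fn synthetic_blocked_day(pr: u32, nr: u32, pr_sd: f64) -> (u32, u32) {
    // Positive reports fall by two standard deviations, but never below zero.
    let drop = (2.0 * pr_sd).round() as u32;
    let pr_blocked = pr.saturating_sub(drop);
    // Each missing positive report is assumed to come back as a negative one.
    let nr_blocked = nr + (pr - pr_blocked);
    (pr_blocked, nr_blocked)
}

fn main() {
    // 10 positive reports, 1 negative report, sd of 2 -> 6 positive, 5 negative
    assert_eq!(synthetic_blocked_day(10, 1, 2.0), (6, 5));
    println!("ok");
}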
@@ -25,7 +25,7 @@ pub struct ExtraInfo {
    /// Map of country codes and how many users (rounded up to a multiple of
    /// 8) have connected to that bridge during the day.
    /// Uses BTreeMap instead of HashMap so ExtraInfo can implement Hash.
    pub bridge_ips: BTreeMap<String, u32>, // TODO: What size for count?
    pub bridge_ips: BTreeMap<String, u32>,
}

impl ExtraInfo {
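The doc comment above notes that per-country connection counts arrive rounded up to a multiple of 8, as in Tor's extra-info descriptors. For reference, that rounding looks like this; the helper is illustrative and not part of the diff:

fn round_up_to_multiple_of_8(n: u32) -> u32 {
    // Round n up to the next multiple of 8 (0 stays 0).
    (n + 7) / 8 * 8
}

fn main() {
    assert_eq!(round_up_to_multiple_of_8(0), 0);
    assert_eq!(round_up_to_multiple_of_8(1), 8);
    assert_eq!(round_up_to_multiple_of_8(17), 24);
    println!("ok");
}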
src/lib.rs (22 lines changed)
@@ -28,7 +28,7 @@ use positive_report::*;
lazy_static! {
// known country codes based on Tor geoIP database
// Produced with `cat /usr/share/tor/geoip{,6} | grep -v ^# | grep -o ..$ | sort | uniq | tr '[:upper:]' '[:lower:]' | tr '\n' ',' | sed 's/,/","/g'`
pub static ref COUNTRY_CODES: HashSet<&'static str> = HashSet::from(["??","ad","ae","af","ag","ai","al","am","ao","ap","aq","ar","as","at","au","aw","ax","az","ba","bb","bd","be","bf","bg","bh","bi","bj","bl","bm","bn","bo","bq","br","bs","bt","bv","bw","by","bz","ca","cc","cd","cf","cg","ch","ci","ck","cl","cm","cn","co","cr","cs","cu","cv","cw","cx","cy","cz","de","dj","dk","dm","do","dz","ec","ee","eg","eh","er","es","et","eu","fi","fj","fk","fm","fo","fr","ga","gb","gd","ge","gf","gg","gh","gi","gl","gm","gn","gp","gq","gr","gs","gt","gu","gw","gy","hk","hm","hn","hr","ht","hu","id","ie","il","im","in","io","iq","ir","is","it","je","jm","jo","jp","ke","kg","kh","ki","km","kn","kp","kr","kw","ky","kz","la","lb","lc","li","lk","lr","ls","lt","lu","lv","ly","ma","mc","md","me","mf","mg","mh","mk","ml","mm","mn","mo","mp","mq","mr","ms","mt","mu","mv","mw","mx","my","mz","na","nc","ne","nf","ng","ni","nl","no","np","nr","nu","nz","om","pa","pe","pf","pg","ph","pk","pl","pm","pn","pr","ps","pt","pw","py","qa","re","ro","rs","ru","rw","sa","sb","sc","sd","se","sg","sh","si","sj","sk","sl","sm","sn","so","sr","ss","st","sv","sx","sy","sz","tc","td","tf","tg","th","tj","tk","tl","tm","tn","to","tr","tt","tv","tw","tz","ua","ug","um","us","uy","uz","va","vc","ve","vg","vi","vn","vu","wf","ws","ye","yt","za","zm","zw"]);
pub static ref COUNTRY_CODES: HashSet<&'static str> = HashSet::from(["??","ac","ad","ae","af","ag","ai","al","am","an","ao","ap","aq","ar","as","at","au","aw","ax","az","ba","bb","bd","be","bf","bg","bh","bi","bj","bl","bm","bn","bo","bq","br","bs","bt","bv","bw","by","bz","ca","cc","cd","cf","cg","ch","ci","ck","cl","cm","cn","co","cr","cs","cu","cv","cw","cx","cy","cz","de","dg","dj","dk","dm","do","dz","ea","ec","ee","eg","eh","er","es","et","eu","fi","fj","fk","fm","fo","fr","ga","gb","gd","ge","gf","gg","gh","gi","gl","gm","gn","gp","gq","gr","gs","gt","gu","gw","gy","hk","hm","hn","hr","ht","hu","ic","id","ie","il","im","in","io","iq","ir","is","it","je","jm","jo","jp","ke","kg","kh","ki","km","kn","kp","kr","kw","ky","kz","la","lb","lc","li","lk","lr","ls","lt","lu","lv","ly","ma","mc","md","me","mf","mg","mh","mk","ml","mm","mn","mo","mp","mq","mr","ms","mt","mu","mv","mw","mx","my","mz","na","nc","ne","nf","ng","ni","nl","no","np","nr","nu","nz","om","pa","pe","pf","pg","ph","pk","pl","pm","pn","pr","ps","pt","pw","py","qa","re","ro","rs","ru","rw","sa","sb","sc","sd","se","sg","sh","si","sj","sk","sl","sm","sn","so","sr","ss","st","sv","sx","sy","sz","ta","tc","td","tf","tg","th","tj","tk","tl","tm","tn","to","tr","tt","tv","tw","tz","ua","ug","uk","um","un","us","uy","uz","va","vc","ve","vg","vi","vn","vu","wf","ws","ye","yt","za","zm","zw"]);
}

/// We will accept reports up to this many days old.
@@ -486,12 +486,10 @@ pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDist
            .unwrap();
        }
    }
    // TODO: Would it be cheaper to just recreate it?
    all_negative_reports.clear();
    // Remove the now-processed reports from the database
    db.insert(
        "nrs-to-process",
        bincode::serialize(&all_negative_reports).unwrap(),
        bincode::serialize(&BTreeMap::<String, Vec<SerializableNegativeReport>>::new()).unwrap(),
    )
    .unwrap();
}
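The change above (and the matching one below for positive reports) stops clearing the deserialized map and instead overwrites the queue key with a freshly serialized empty map. A rough sketch of that reset pattern with sled and bincode, both already used by the crate; the value type here is a stand-in for the crate's report types:

use std::collections::BTreeMap;

fn reset_queue(db: &sled::Db, key: &str) {
    // Write a newly constructed empty map rather than clearing and re-serializing.
    let empty: BTreeMap<String, Vec<String>> = BTreeMap::new();
    db.insert(key, bincode::serialize(&empty).unwrap()).unwrap();
}

fn main() {
    let db = sled::open("example_db").unwrap();
    reset_queue(&db, "nrs-to-process");
    println!("queue reset");
}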
@@ -601,12 +599,10 @@ pub async fn update_positive_reports(db: &Db, distributors: &BTreeMap<BridgeDist
            .unwrap();
        }
    }
    // TODO: Would it be cheaper to just recreate it?
    all_positive_reports.clear();
    // Remove the now-processed reports from the database
    db.insert(
        "prs-to-process",
        bincode::serialize(&all_positive_reports).unwrap(),
        bincode::serialize(&BTreeMap::<String, Vec<SerializablePositiveReport>>::new()).unwrap(),
    )
    .unwrap();
}
@@ -634,7 +630,16 @@ pub fn guess_blockages(
        let mut bridge_info: BridgeInfo =
            bincode::deserialize(&db.get(fingerprint).unwrap().unwrap()).unwrap();
        let mut new_blockages = HashSet::<String>::new();
        let blocked_in = analysis::blocked_in(analyzer, &bridge_info, confidence, get_date());
        // Re-evaluate the last MAX_BACKDATE + 1 days in case we received new
        // reports for those days. For efficiency, we could instead keep track
        // of which bridges received new reports and only re-evaluate those.
        for i in 0..MAX_BACKDATE + 1 {
            let blocked_in = analysis::blocked_in(
                analyzer,
                &bridge_info,
                confidence,
                get_date() - MAX_BACKDATE - 1 + i,
            );
            for country in blocked_in {
                let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
                if !bridge_country_info.blocked {
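The new loop above re-evaluates a fixed window of recent days so that backdated reports are not missed. Assuming MAX_BACKDATE = 3 (an illustrative value, not necessarily the crate's), the dates it covers are the MAX_BACKDATE + 1 days ending yesterday:

const MAX_BACKDATE: u32 = 3; // assumed value for illustration

fn reevaluation_dates(today: u32) -> Vec<u32> {
    // Mirrors the loop bounds in the diff: i in 0..MAX_BACKDATE + 1,
    // evaluating get_date() - MAX_BACKDATE - 1 + i.
    (0..MAX_BACKDATE + 1)
        .map(|i| today - MAX_BACKDATE - 1 + i)
        .collect()
}

fn main() {
    // With today = 100, the window is the four days 96..=99 (today itself excluded).
    assert_eq!(reevaluation_dates(100), vec![96, 97, 98, 99]);
    println!("ok");
}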
@@ -643,6 +648,7 @@ pub fn guess_blockages(
                    bridge_country_info.blocked = true;
                }
            }
        }
        blockages.insert(fingerprint, new_blockages);

        // Commit changes to database
src/tests.rs (211 lines changed)
@@ -155,7 +155,7 @@ async fn test_extra_infos() {
        array_bytes::hex2array("72E12B89136B45BBC81D1EF0AC7DDDBB91B148DB").unwrap();

    // Open test database
    let db: Db = sled::open("test_db").unwrap();
    let db: Db = sled::open("test_db_ei").unwrap();

    // Delete all data in test DB
    db.clear().unwrap();
@@ -337,7 +337,7 @@ fn test_negative_reports() {
    // (Also test encryption and decryption.)

    // Open test database
    let db: Db = sled::open("test_db").unwrap();
    let db: Db = sled::open("test_db_nr").unwrap();

    // Delete all data in test DB
    db.clear().unwrap();
@@ -785,7 +785,7 @@ fn test_positive_reports() {
        .unwrap();

    // Open test database
    let db: Db = sled::open("test_db").unwrap();
    let db: Db = sled::open("test_db_pr").unwrap();

    // Delete all data in test DB
    db.clear().unwrap();
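The renames above (test_db to test_db_ei, test_db_nr, test_db_pr) give each test its own sled directory, so tests that Rust runs in parallel do not clobber one shared database. A common form of that pattern; the helper is illustrative, not from the crate:

fn open_test_db(name: &str) -> sled::Db {
    // e.g. open_test_db("ei") opens ./test_db_ei
    sled::open(format!("test_db_{}", name)).unwrap()
}

fn main() {
    let db = open_test_db("demo");
    db.clear().unwrap();
    println!("recovered existing db: {}", db.was_recovered());
}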
@@ -1119,7 +1119,210 @@ fn test_analysis() {
        );
    }

    // TODO: More tests
    {
        let mut date = get_date();

        // New bridge info
        let mut bridge_info = BridgeInfo::new([0; 20], &String::default());

        bridge_info
            .info_by_country
            .insert("ru".to_string(), BridgeCountryInfo::new());
        let analyzer = analysis::NormalAnalyzer::new(5, 0.25);
        let confidence = 0.95;

        let mut blocking_countries = HashSet::<String>::new();

        // No data today
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        for i in 1..30 {
            // 9-32 connections, 0-3 negative reports each day
            date += 1;
            bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
                BridgeInfoType::BridgeIps,
                date,
                8 * (i % 3 + 2),
            );
            bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
                BridgeInfoType::NegativeReports,
                date,
                i % 4,
            );
            assert_eq!(
                blocked_in(&analyzer, &bridge_info, confidence, date),
                blocking_countries
            );
        }

        // Data similar to previous days:
        // 24 connections, 2 negative reports
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            24,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            2,
        );

        // Should not be blocked because we have similar data.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        // Data different from previous days:
        // 104 connections, 1 negative report
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            104,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            1,
        );

        // This should not be blocked even though it's very different because
        // it's different in the good direction.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        // Data different from previous days:
        // 800 connections, 12 negative reports
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            800,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            12,
        );
        blocking_countries.insert("ru".to_string());

        // The censor artificially inflated bridge stats to prevent detection.
        // Ensure we still detect the censorship from negative reports.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );
    }

    {
        let mut date = get_date();

        // New bridge info
        let mut bridge_info = BridgeInfo::new([0; 20], &String::default());

        bridge_info
            .info_by_country
            .insert("ru".to_string(), BridgeCountryInfo::new());
        let analyzer = analysis::NormalAnalyzer::new(5, 0.25);
        let confidence = 0.95;

        let mut blocking_countries = HashSet::<String>::new();

        // No data today
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        for i in 1..30 {
            // 9-32 connections, 0-3 negative reports each day
            date += 1;
            bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
                BridgeInfoType::BridgeIps,
                date,
                8 * (i % 3 + 2),
            );
            bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
                BridgeInfoType::NegativeReports,
                date,
                i % 4,
            );
            assert_eq!(
                blocked_in(&analyzer, &bridge_info, confidence, date),
                blocking_countries
            );
        }

        // Data similar to previous days:
        // 24 connections, 2 negative reports
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            24,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            2,
        );

        // Should not be blocked because we have similar data.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        // Data different from previous days:
        // 104 connections, 1 negative report
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            104,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            1,
        );

        // This should not be blocked even though it's very different because
        // it's different in the good direction.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );

        // Data different from previous days:
        // 0 connections, 0 negative reports
        date += 1;
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::BridgeIps,
            date,
            0,
        );
        bridge_info.info_by_country.get_mut("ru").unwrap().add_info(
            BridgeInfoType::NegativeReports,
            date,
            0,
        );
        blocking_countries.insert("ru".to_string());

        // This should be blocked because it's different in the bad direction.
        assert_eq!(
            blocked_in(&analyzer, &bridge_info, confidence, date),
            blocking_countries
        );
    }

    // TODO: Test stage 3 analysis
}