Compare commits

...

5 Commits

8 changed files with 505 additions and 80 deletions

View File

@@ -21,6 +21,7 @@ hyper-util = { version = "0.1", features = ["full"] }
julianday = "1.2.0"
lazy_static = "1"
lox-library = { git = "https://gitlab.torproject.org/vecna/lox.git", version = "0.1.0" }
rand = { version = "0.8" }
#select = "0.6.0"
serde = "1.0.197"
serde_json = "1.0"
@@ -34,4 +35,3 @@ tokio-cron = "0.1.2"
[dev-dependencies]
base64 = "0.21.7"
rand = "0.8.5"

View File

@@ -6,6 +6,9 @@
"distributors": {
"Lox": "127.0.0.1:8002"
},
"extra_infos_base_url": "https://collector.torproject.org/recent/bridge-descriptors/extra-infos/",
"confidence": 0.95,
"port": 8003,
"require_bridge_token": false
"require_bridge_token": false,
"updater_schedule": "* * 22 * * * *"
}
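For reference, a minimal sketch (not part of this changeset) of how the two new keys, "confidence" and "updater_schedule", map onto the Rust side; the reduced struct below only mirrors fields visible in this diff and assumes serde's derive feature is enabled, as the real Config in main.rs already relies on it.

```rust
use serde::Deserialize;

// Sketch only: reduced stand-in for the real Config, limited to fields shown in this diff.
#[derive(Debug, Deserialize)]
struct ConfigSketch {
    extra_infos_base_url: String,
    // confidence required to consider a bridge blocked (0.95 in the sample config)
    confidence: f64,
    port: u16,
    // cron-style expression handed to the updater scheduler
    updater_schedule: String,
}

fn main() {
    let json = r#"{
        "extra_infos_base_url": "https://collector.torproject.org/recent/bridge-descriptors/extra-infos/",
        "confidence": 0.95,
        "port": 8003,
        "updater_schedule": "* * 22 * * * *"
    }"#;
    let cfg: ConfigSketch = serde_json::from_str(json).unwrap();
    assert_eq!(cfg.confidence, 0.95);
    assert_eq!(cfg.updater_schedule, "* * 22 * * * *");
}
```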

View File

@@ -3,14 +3,14 @@ use std::collections::HashSet;
/// Provides a function for predicting which countries block this bridge
pub trait Analyzer {
fn blocked_in(&self, bridge_info: &BridgeInfo) -> HashSet<String>;
fn blocked_in(&self, bridge_info: &BridgeInfo, confidence: f64) -> HashSet<String>;
}
pub struct ExampleAnalyzer {}
/// Dummy example which just tells us about blockages we already know about
impl Analyzer for ExampleAnalyzer {
fn blocked_in(&self, bridge_info: &BridgeInfo) -> HashSet<String> {
fn blocked_in(&self, bridge_info: &BridgeInfo, confidence: f64) -> HashSet<String> {
let mut blocked_in = HashSet::<String>::new();
for (country, info) in &bridge_info.info_by_country {
if info.blocked {

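A hypothetical illustration (not from this changeset) of how an Analyzer implementation might use the new `confidence` argument. The types here are self-contained stand-ins for the crate's `BridgeInfo` and per-country info, and `block_probability` is an invented placeholder for whatever statistics the real data carries.

```rust
use std::collections::{HashMap, HashSet};

// Stand-ins for the crate's types, reduced to what this sketch needs.
struct CountryInfo {
    blocked: bool,
    // hypothetical per-country estimate; the real type keeps richer stats
    block_probability: f64,
}

struct BridgeInfo {
    info_by_country: HashMap<String, CountryInfo>,
}

trait Analyzer {
    fn blocked_in(&self, bridge_info: &BridgeInfo, confidence: f64) -> HashSet<String>;
}

struct ThresholdAnalyzer;

impl Analyzer for ThresholdAnalyzer {
    fn blocked_in(&self, bridge_info: &BridgeInfo, confidence: f64) -> HashSet<String> {
        let mut blocked_in = HashSet::new();
        for (country, info) in &bridge_info.info_by_country {
            // Report a country as blocked if we already know it is, or if the
            // estimated probability of blocking reaches the configured confidence.
            if info.blocked || info.block_probability >= confidence {
                blocked_in.insert(country.clone());
            }
        }
        blocked_in
    }
}
```

The ExampleAnalyzer in this diff appears to ignore the threshold and only reports blockages that are already recorded; a statistics-based analyzer would presumably compare its own estimate against `confidence` as sketched above.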
View File

@@ -41,8 +41,11 @@ pub struct Config {
// map of distributor name to IP:port to contact it
pub distributors: BTreeMap<BridgeDistributor, String>,
extra_infos_base_url: String,
// confidence required to consider a bridge blocked
confidence: f64,
//require_bridge_token: bool,
port: u16,
updater_schedule: String,
}
#[derive(Debug, Deserialize)]
@@ -63,13 +66,14 @@ async fn update_daily_info(
db: &Db,
distributors: &BTreeMap<BridgeDistributor, String>,
extra_infos_base_url: &str,
confidence: f64,
) {
update_extra_infos(&db, &extra_infos_base_url)
.await
.unwrap();
update_negative_reports(&db, &distributors).await;
update_positive_reports(&db, &distributors).await;
let new_blockages = guess_blockages(&db, &analyzer::ExampleAnalyzer {});
let new_blockages = guess_blockages(&db, &analyzer::ExampleAnalyzer {}, confidence);
report_blockages(&distributors, new_blockages).await;
}
@@ -81,11 +85,12 @@ async fn create_context_manager(
db_config: DbConfig,
distributors: BTreeMap<BridgeDistributor, String>,
extra_infos_base_url: &str,
confidence: f64,
context_rx: mpsc::Receiver<Command>,
mut kill: broadcast::Receiver<()>,
) {
tokio::select! {
create_context = context_manager(db_config, distributors, extra_infos_base_url, context_rx) => create_context,
create_context = context_manager(db_config, distributors, extra_infos_base_url, confidence, context_rx) => create_context,
_ = kill.recv() => {println!("Shut down manager");},
}
}
@@ -94,6 +99,7 @@ async fn context_manager(
db_config: DbConfig,
distributors: BTreeMap<BridgeDistributor, String>,
extra_infos_base_url: &str,
confidence: f64,
mut context_rx: mpsc::Receiver<Command>,
) {
let db: Db = sled::open(&db_config.db_path).unwrap();
@@ -114,7 +120,7 @@ async fn context_manager(
println!("Shutdown Sent.");
}
Update {} => {
update_daily_info(&db, &distributors, &extra_infos_base_url).await;
update_daily_info(&db, &distributors, &extra_infos_base_url, confidence).await;
}
}
}
@@ -135,13 +141,6 @@ enum Command {
#[tokio::main]
async fn main() {
// TODO: Currently, we're processing extra-infos here, but we want to:
// 2. Periodically (daily):
// a) download new extra-infos
// b) determine whether we think each bridge is blocked or not
// c) report these results to the LA
// 3. Store all our data
let args: Args = Args::parse();
let config: Config = serde_json::from_reader(BufReader::new(
@@ -178,7 +177,7 @@ async fn main() {
let updater = spawn(async move {
// Run updater once per day
let mut sched = Scheduler::utc();
sched.add(Job::new("* * 22 * * * *", move || {
sched.add(Job::new(config.updater_schedule, move || {
run_updater(updater_tx.clone())
}));
});
@@ -188,6 +187,7 @@ async fn main() {
config.db,
config.distributors,
&config.extra_infos_base_url,
config.confidence,
request_rx,
kill,
)

View File

@@ -29,6 +29,9 @@ lazy_static! {
pub static ref COUNTRY_CODES: HashSet<&'static str> = HashSet::from(["??","ad","ae","af","ag","ai","al","am","ao","ap","aq","ar","as","at","au","aw","ax","az","ba","bb","bd","be","bf","bg","bh","bi","bj","bl","bm","bn","bo","bq","br","bs","bt","bv","bw","by","bz","ca","cc","cd","cf","cg","ch","ci","ck","cl","cm","cn","co","cr","cs","cu","cv","cw","cx","cy","cz","de","dj","dk","dm","do","dz","ec","ee","eg","eh","er","es","et","eu","fi","fj","fk","fm","fo","fr","ga","gb","gd","ge","gf","gg","gh","gi","gl","gm","gn","gp","gq","gr","gs","gt","gu","gw","gy","hk","hm","hn","hr","ht","hu","id","ie","il","im","in","io","iq","ir","is","it","je","jm","jo","jp","ke","kg","kh","ki","km","kn","kp","kr","kw","ky","kz","la","lb","lc","li","lk","lr","ls","lt","lu","lv","ly","ma","mc","md","me","mf","mg","mh","mk","ml","mm","mn","mo","mp","mq","mr","ms","mt","mu","mv","mw","mx","my","mz","na","nc","ne","nf","ng","ni","nl","no","np","nr","nu","nz","om","pa","pe","pf","pg","ph","pk","pl","pm","pn","pr","ps","pt","pw","py","qa","re","ro","rs","ru","rw","sa","sb","sc","sd","se","sg","sh","si","sj","sk","sl","sm","sn","so","sr","ss","st","sv","sx","sy","sz","tc","td","tf","tg","th","tj","tk","tl","tm","tn","to","tr","tt","tv","tw","tz","ua","ug","um","us","uy","uz","va","vc","ve","vg","vi","vn","vu","wf","ws","ye","yt","za","zm","zw"]);
}
/// We will accept reports up to this many days old.
pub const MAX_BACKDATE: u32 = 3;
/// Get Julian date
pub fn get_date() -> u32 {
time::OffsetDateTime::now_utc()
@@ -315,14 +318,25 @@ pub async fn update_extra_infos(
// Process negative reports
/// Negative reports can be deduplicated, so we store to-be-processed
/// negative reports as a map of [report] to [count of report]. Add this
/// NR to that map (or create a new map if necessary).
/// We store to-be-processed negative reports as a vector. Add this NR
/// to that vector (or create a new vector if necessary)
pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
// We serialize the negative reports as strings to use them as map keys.
// TODO: Purge these database entries sometimes
let mut nonces = match db.get(format!("nonces_{}", &nr.date)).unwrap() {
Some(v) => bincode::deserialize(&v).unwrap(),
None => HashSet::<[u8; 32]>::new(),
};
// Just ignore the report if we've seen the nonce before
if nonces.insert(nr.nonce) {
db.insert(
format!("nonces_{}", &nr.date),
bincode::serialize(&nonces).unwrap(),
)
.unwrap();
let mut reports = match db.get("nrs-to-process").unwrap() {
Some(v) => bincode::deserialize(&v).unwrap(),
None => BTreeMap::<String, BTreeMap<String, u32>>::new(),
None => BTreeMap::<String, Vec<SerializableNegativeReport>>::new(),
};
// Store to-be-processed reports with key [fingerprint]_[country]_[date]
let map_key = format!(
@@ -331,23 +345,20 @@ pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
&nr.country,
&nr.date,
);
let serialized_nr = nr.to_json();
if reports.contains_key(&map_key) {
let nr_map = reports.get_mut(&map_key).unwrap();
if nr_map.contains_key(&serialized_nr) {
let prev_count = nr_map.get(&serialized_nr).unwrap();
nr_map.insert(serialized_nr, prev_count + 1);
reports
.get_mut(&map_key)
.unwrap()
.push(nr.to_serializable_report());
} else {
nr_map.insert(serialized_nr, 1);
}
} else {
let mut nr_map = BTreeMap::<String, u32>::new();
nr_map.insert(serialized_nr, 1);
reports.insert(map_key, nr_map);
let mut nrs = Vec::<SerializableNegativeReport>::new();
nrs.push(nr.to_serializable_report());
reports.insert(map_key, nrs);
}
// Commit changes to database
db.insert("nrs-to-process", bincode::serialize(&reports).unwrap())
.unwrap();
}
}
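The nonce bookkeeping above can be exercised in isolation. A minimal sketch of the same pattern (a per-date set of nonces persisted in sled via bincode), assuming a throwaway database path and the sled/bincode crates this repo already uses:

```rust
use std::collections::HashSet;

// Sketch of the dedup step: a report is only queued if its nonce has not
// been seen for that date.
fn nonce_is_fresh(db: &sled::Db, date: u32, nonce: [u8; 32]) -> bool {
    let key = format!("nonces_{}", date);
    let mut nonces: HashSet<[u8; 32]> = match db.get(&key).unwrap() {
        Some(v) => bincode::deserialize(&v).unwrap(),
        None => HashSet::new(),
    };
    if nonces.insert(nonce) {
        // First time we see this nonce today: persist the updated set.
        db.insert(key, bincode::serialize(&nonces).unwrap()).unwrap();
        true
    } else {
        false
    }
}

fn main() {
    let db = sled::open("nonce_sketch_db").unwrap();
    db.clear().unwrap();
    let nonce = [7u8; 32];
    assert!(nonce_is_fresh(&db, 2_460_000, nonce)); // accepted
    assert!(!nonce_is_fresh(&db, 2_460_000, nonce)); // duplicate, ignored
    assert!(nonce_is_fresh(&db, 2_460_001, nonce)); // same nonce, different day
}
```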
/// Sends a collection of negative reports to the Lox Authority and returns the
@@ -356,15 +367,14 @@ pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
/// distributor.
pub async fn verify_negative_reports(
distributors: &BTreeMap<BridgeDistributor, String>,
reports: &BTreeMap<String, u32>,
reports: &Vec<SerializableNegativeReport>,
) -> u32 {
// Don't make a network call if we don't have any reports anyway
if reports.is_empty() {
return 0;
}
// Get one report, assume the rest have the same distributor
let first_report: SerializableNegativeReport =
serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
let first_report = &reports[0];
let distributor = first_report.distributor;
let client = Client::new();
let uri: String = (distributors.get(&distributor).unwrap().to_owned() + "/verifynegative")
@@ -385,17 +395,16 @@ pub async fn verify_negative_reports(
pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
let mut all_negative_reports = match db.get("nrs-to-process").unwrap() {
Some(v) => bincode::deserialize(&v).unwrap(),
None => BTreeMap::<String, BTreeMap<String, u32>>::new(),
None => BTreeMap::<String, Vec<SerializableNegativeReport>>::new(),
};
// Key is [fingerprint]_[country]_[date]
for bridge_country_date in all_negative_reports.keys() {
let reports = all_negative_reports.get(bridge_country_date).unwrap();
if !reports.is_empty() {
let first_report: SerializableNegativeReport =
serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
let first_report = &reports[0];
let fingerprint = first_report.fingerprint;
let date = first_report.date;
let country = first_report.country;
let country = first_report.country.clone();
let count_valid = verify_negative_reports(&distributors, reports).await;
// Get bridge info or make new one
@@ -421,7 +430,6 @@ pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDist
.info_by_country
.insert(country, bridge_country_info);
}
// Commit changes to database
db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
.unwrap();
@@ -556,7 +564,11 @@ pub async fn update_positive_reports(db: &Db, distributors: &BTreeMap<BridgeDist
/// Guess which countries block a bridge. This function returns a map of new
/// blockages (fingerprint : set of countries which block the bridge)
pub fn guess_blockages(db: &Db, analyzer: &dyn Analyzer) -> HashMap<[u8; 20], HashSet<String>> {
pub fn guess_blockages(
db: &Db,
analyzer: &dyn Analyzer,
confidence: f64,
) -> HashMap<[u8; 20], HashSet<String>> {
// Map of bridge fingerprint to set of countries which newly block it
let mut blockages = HashMap::<[u8; 20], HashSet<String>>::new();
@@ -571,7 +583,7 @@ pub fn guess_blockages(db: &Db, analyzer: &dyn Analyzer) -> HashMap<[u8; 20], Ha
let mut bridge_info: BridgeInfo =
bincode::deserialize(&db.get(fingerprint).unwrap().unwrap()).unwrap();
let mut new_blockages = HashSet::<String>::new();
let blocked_in = analyzer.blocked_in(&bridge_info);
let blocked_in = analyzer.blocked_in(&bridge_info, confidence);
for country in blocked_in {
let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
if !bridge_country_info.blocked {

View File

@@ -1,10 +1,11 @@
use crate::{
bridge_verification_info::BridgeVerificationInfo, get_date, BridgeDistributor, COUNTRY_CODES,
MAX_BACKDATE,
};
use curve25519_dalek::scalar::Scalar;
use lox_library::{bridge_table::BridgeLine, cred::Lox};
use rand::RngCore;
use serde::{Deserialize, Serialize};
use sha1::{Digest, Sha1};
use sha3::Sha3_256;
@@ -12,13 +13,14 @@ use sha3::Sha3_256;
#[derive(Debug, Serialize)]
pub enum NegativeReportError {
DateInFuture,
DateInPast, // report is more than MAX_BACKDATE days old
FailedToDeserialize, // couldn't deserialize to SerializableNegativeReport
InvalidCountryCode,
MissingCountryCode,
}
/// A report that the user was unable to connect to the bridge
#[derive(Eq, PartialEq, Ord, PartialOrd)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd)]
pub struct NegativeReport {
/// hashed fingerprint (SHA-1 hash of 20-byte bridge ID)
pub fingerprint: [u8; 20],
@@ -32,6 +34,9 @@ pub struct NegativeReport {
/// today's Julian date
pub date: u32,
/// a random nonce used in the bridge_pok
pub nonce: [u8; 32],
/// the bridge distributor, e.g., Lox, Https, or Moat
pub distributor: BridgeDistributor,
}
@@ -42,6 +47,7 @@ impl NegativeReport {
bridge_pok: ProofOfBridgeKnowledge,
country: String,
date: u32,
nonce: [u8; 32],
distributor: BridgeDistributor,
) -> Self {
let mut hasher = Sha1::new();
@@ -52,6 +58,7 @@ impl NegativeReport {
bridge_pok,
country,
date,
nonce,
distributor,
}
}
@@ -62,24 +69,42 @@ impl NegativeReport {
distributor: BridgeDistributor,
) -> Self {
let date = get_date();
let bridge_pok =
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(&bridgeline, date));
NegativeReport::new(
let mut rng = rand::thread_rng();
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let bridge_pok = ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&bridgeline,
date,
nonce,
));
Self::new(
bridgeline.fingerprint,
bridge_pok,
country,
date,
nonce,
distributor,
)
}
pub fn from_lox_bucket(bridge_id: [u8; 20], bucket: Scalar, country: String) -> Self {
let date = get_date();
let bridge_pok = ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&bucket, date));
NegativeReport::new(bridge_id, bridge_pok, country, date, BridgeDistributor::Lox)
let mut rng = rand::thread_rng();
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let bridge_pok =
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&bucket, date, nonce));
Self::new(
bridge_id,
bridge_pok,
country,
date,
nonce,
BridgeDistributor::Lox,
)
}
pub fn from_lox_credential(bridge_id: [u8; 20], cred: Lox, country: String) -> Self {
pub fn from_lox_credential(bridge_id: [u8; 20], cred: &Lox, country: String) -> Self {
NegativeReport::from_lox_bucket(bridge_id, cred.bucket, country)
}
@@ -90,6 +115,7 @@ impl NegativeReport {
bridge_pok: self.bridge_pok,
country: self.country,
date: self.date,
nonce: self.nonce,
distributor: self.distributor,
}
}
@@ -119,12 +145,12 @@ impl NegativeReport {
pub fn verify(self, bridge_info: &BridgeVerificationInfo) -> bool {
match self.bridge_pok {
ProofOfBridgeKnowledge::HashOfBridgeLine(pok) => {
let hash = HashOfBridgeLine::new(&bridge_info.bridge_line, self.date);
let hash = HashOfBridgeLine::new(&bridge_info.bridge_line, self.date, self.nonce);
hash == pok
}
ProofOfBridgeKnowledge::HashOfBucket(pok) => {
for b in &bridge_info.buckets {
let hash = HashOfBucket::new(&b, self.date);
let hash = HashOfBucket::new(&b, self.date, self.nonce);
if hash == pok {
return true;
}
@@ -143,6 +169,7 @@ pub struct SerializableNegativeReport {
bridge_pok: ProofOfBridgeKnowledge,
pub country: String,
pub date: u32,
pub nonce: [u8; 32],
pub distributor: BridgeDistributor,
}
@@ -154,21 +181,26 @@ impl SerializableNegativeReport {
if !COUNTRY_CODES.contains(self.country.as_str()) {
return Err(NegativeReportError::InvalidCountryCode);
}
if self.date > get_date().into() {
let date = get_date();
if self.date > date {
return Err(NegativeReportError::DateInFuture);
}
if self.date < date - MAX_BACKDATE {
return Err(NegativeReportError::DateInPast);
}
Ok(NegativeReport {
fingerprint: self.fingerprint,
bridge_pok: self.bridge_pok,
country: self.country.to_string(),
date: self.date.try_into().unwrap(),
nonce: self.nonce,
distributor: self.distributor,
})
}
}
/// Proof that the user knows (and should be able to access) a given bridge
#[derive(Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
pub enum ProofOfBridgeKnowledge {
/// Hash of bridge line as proof of knowledge of bridge line
HashOfBridgeLine(HashOfBridgeLine),
@@ -178,15 +210,16 @@ pub enum ProofOfBridgeKnowledge {
}
/// Hash of bridge line to prove knowledge of that bridge
#[derive(Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
pub struct HashOfBridgeLine {
hash: [u8; 32],
}
impl HashOfBridgeLine {
pub fn new(bl: &BridgeLine, date: u32) -> Self {
pub fn new(bl: &BridgeLine, date: u32, nonce: [u8; 32]) -> Self {
let mut hasher = Sha3_256::new();
hasher.update(date.to_le_bytes());
hasher.update(nonce);
hasher.update(bincode::serialize(&bl).unwrap());
let hash: [u8; 32] = hasher.finalize().into();
Self { hash }
@@ -194,15 +227,16 @@ impl HashOfBridgeLine {
}
/// Hash of bucket ID to prove knowledge of bridges in that bucket
#[derive(Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
pub struct HashOfBucket {
hash: [u8; 32],
}
impl HashOfBucket {
pub fn new(bucket: &Scalar, date: u32) -> Self {
pub fn new(bucket: &Scalar, date: u32, nonce: [u8; 32]) -> Self {
let mut hasher = Sha3_256::new();
hasher.update(date.to_le_bytes());
hasher.update(nonce);
hasher.update(bucket.to_bytes());
let hash: [u8; 32] = hasher.finalize().into();
Self { hash }

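A small sketch (using only the sha3 crate already imported in this file) of the property the new `nonce` argument provides: the same date, nonce, and payload always hash to the same value, so a resubmitted report is byte-identical and can be dropped, while an independent report with a fresh nonce hashes differently. The `payload` here is a stand-in for the bincode-serialized bridge line.

```rust
use sha3::{Digest, Sha3_256};

// Mirrors the HashOfBridgeLine construction: SHA3-256 over the
// little-endian date, the 32-byte nonce, and the serialized payload.
fn pok_hash(date: u32, nonce: [u8; 32], payload: &[u8]) -> [u8; 32] {
    let mut hasher = Sha3_256::new();
    hasher.update(date.to_le_bytes());
    hasher.update(nonce);
    hasher.update(payload);
    hasher.finalize().into()
}

fn main() {
    let date = 2_460_000u32;
    let payload = b"example bridge line bytes";
    let nonce_a = [1u8; 32];
    let nonce_b = [2u8; 32];
    // Same nonce: the duplicate report is identical and can be deduplicated.
    assert_eq!(pok_hash(date, nonce_a, payload), pok_hash(date, nonce_a, payload));
    // Fresh nonce: a distinct report, even for the same bridge and date.
    assert_ne!(pok_hash(date, nonce_a, payload), pok_hash(date, nonce_b, payload));
}
```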
View File

@@ -1,7 +1,9 @@
// For Lox-related code where points are uppercase and scalars are lowercase
#![allow(non_snake_case)]
use crate::{bridge_verification_info::BridgeVerificationInfo, get_date, COUNTRY_CODES};
use crate::{
bridge_verification_info::BridgeVerificationInfo, get_date, COUNTRY_CODES, MAX_BACKDATE,
};
use curve25519_dalek::ristretto::RistrettoBasepointTable;
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier};
@@ -15,6 +17,7 @@ pub const REQUIRE_BRIDGE_TOKEN: bool = false;
#[derive(Debug, Serialize)]
pub enum PositiveReportError {
DateInFuture,
DateInPast, // report is more than MAX_BACKDATE days old
FailedToDeserialize, // couldn't deserialize to SerializablePositiveReport
InvalidBridgeToken,
InvalidCountryCode,
@@ -180,10 +183,13 @@ impl SerializablePositiveReport {
if !COUNTRY_CODES.contains(self.country.as_str()) {
return Err(PositiveReportError::InvalidCountryCode);
}
let date: u32 = get_date().into();
let date: u32 = get_date();
if self.date > date {
return Err(PositiveReportError::DateInFuture);
}
if self.date < date - MAX_BACKDATE {
return Err(PositiveReportError::DateInPast);
}
if self.lox_proof.date != date {
return Err(PositiveReportError::InvalidLoxProof);
}

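The date checks added here accept a small backdating window. A standalone sketch of the boundary behaviour, assuming `MAX_BACKDATE = 3` as defined in this changeset (today and the previous three days are accepted; anything newer or older is rejected):

```rust
/// Reports may be backdated by at most this many days (mirrors MAX_BACKDATE).
const MAX_BACKDATE: u32 = 3;

#[derive(Debug, PartialEq)]
enum DateError {
    DateInFuture,
    DateInPast,
}

// Same window logic as the to_report() checks.
fn check_date(report_date: u32, today: u32) -> Result<(), DateError> {
    if report_date > today {
        return Err(DateError::DateInFuture);
    }
    if report_date < today - MAX_BACKDATE {
        return Err(DateError::DateInPast);
    }
    Ok(())
}

fn main() {
    let today = 2_460_000u32; // an arbitrary Julian date
    assert_eq!(check_date(today, today), Ok(()));
    assert_eq!(check_date(today - MAX_BACKDATE, today), Ok(())); // oldest accepted
    assert_eq!(check_date(today - MAX_BACKDATE - 1, today), Err(DateError::DateInPast));
    assert_eq!(check_date(today + 1, today), Err(DateError::DateInFuture));
}
```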
View File

@@ -210,12 +210,64 @@ fn test_negative_reports() {
let report_2 =
NegativeReport::from_lox_bucket(bridges[1].fingerprint, cred.bucket, "ru".to_string());
let report_3 =
NegativeReport::from_lox_credential(bridges[2].fingerprint, cred, "ru".to_string());
NegativeReport::from_lox_credential(bridges[2].fingerprint, &cred, "ru".to_string());
// Backdated reports
let date = get_date();
let mut rng = rand::thread_rng();
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let report_4 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&bridges[0],
date - 1,
nonce,
)),
"ru".to_string(),
date - 1,
nonce,
BridgeDistributor::Lox,
);
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let report_5 = NegativeReport::new(
bridges[1].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&bridges[1],
date - 2,
nonce,
)),
"ru".to_string(),
date - 2,
nonce,
BridgeDistributor::Lox,
);
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let report_6 = NegativeReport::new(
bridges[2].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&bridges[2],
date - 3,
nonce,
)),
"ru".to_string(),
date - 3,
nonce,
BridgeDistributor::Lox,
);
// Verify reports
assert!(report_1.verify(&bridge_info_1));
assert!(report_2.verify(&bridge_info_2));
assert!(report_3.verify(&bridge_info_3));
assert!(report_4.verify(&bridge_info_1));
assert!(report_5.verify(&bridge_info_2));
assert!(report_6.verify(&bridge_info_3));
// Check that deserialization fails under invalid conditions
@@ -225,41 +277,287 @@ fn test_negative_reports() {
.to_serializable_report();
invalid_report_1.date = invalid_report_1.date + 2;
// Date too far in past
let mut invalid_report_2 =
NegativeReport::from_bridgeline(bridges[1], "ru".to_string(), BridgeDistributor::Lox)
.to_serializable_report();
invalid_report_2.date = invalid_report_2.date - MAX_BACKDATE - 1;
// Invalid country code
let invalid_report_2 =
NegativeReport::from_bridgeline(bridges[1], "xx".to_string(), BridgeDistributor::Lox)
let invalid_report_3 =
NegativeReport::from_bridgeline(bridges[2], "xx".to_string(), BridgeDistributor::Lox)
.to_serializable_report();
assert!(invalid_report_1.to_report().is_err());
assert!(invalid_report_2.to_report().is_err());
assert!(invalid_report_3.to_report().is_err());
// Check that verification fails with incorrect data
let date = get_date();
let mut rng = rand::thread_rng();
// Incorrect BridgeLine hash
let invalid_report_3 = NegativeReport::new(
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let invalid_report_4 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&BridgeLine::default(),
date,
nonce,
)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// Incorrect bucket hash
let invalid_report_4 = NegativeReport::new(
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let invalid_report_5 = NegativeReport::new(
bridges[1].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&Scalar::ZERO, date)),
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&Scalar::ZERO, date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
assert!(!invalid_report_3.verify(&bridge_info_1));
assert!(!invalid_report_4.verify(&bridge_info_2));
assert!(!invalid_report_4.verify(&bridge_info_1));
assert!(!invalid_report_5.verify(&bridge_info_2));
// Test that reports with duplicate nonces are rejected
// Open test database
let db: Db = sled::open("test_db").unwrap();
// Delete all data in test DB
db.clear().unwrap();
assert!(!db.contains_key("nrs-to-process").unwrap());
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
// A valid report
let valid_report_1 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(&bridges[0], date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// Report which reuses this nonce
let invalid_report_1 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(&bridges[0], date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// This is the same report
assert_eq!(valid_report_1, invalid_report_1);
// Report which reuses this nonce for a different bridge
let invalid_report_2 = NegativeReport::new(
bridges[1].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(&bridges[1], date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// Report which uses this nonce but on a different day
let valid_report_2 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(
&bridges[0],
date - 1,
nonce,
)),
"ru".to_string(),
date - 1,
nonce,
BridgeDistributor::Lox,
);
// Report with different nonce
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let valid_report_3 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBridgeLine(HashOfBridgeLine::new(&bridges[0], date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
let map_key_1 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", valid_report_1.fingerprint),
"ru".to_string(),
date
);
save_negative_report_to_process(&db, valid_report_1);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 1);
save_negative_report_to_process(&db, invalid_report_1); // no change
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 1);
let map_key_2 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", invalid_report_2.fingerprint),
"ru".to_string(),
date
);
save_negative_report_to_process(&db, invalid_report_2); // no change
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
assert!(!nrs_to_process.contains_key(&map_key_2));
let map_key_3 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", valid_report_2.fingerprint),
"ru".to_string(),
date - 1
);
save_negative_report_to_process(&db, valid_report_2);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_3).unwrap();
assert_eq!(negative_reports.len(), 1);
save_negative_report_to_process(&db, valid_report_3);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 2);
// Same tests, but use hash of bucket
// Delete all data in test DB
db.clear().unwrap();
assert!(!db.contains_key("nrs-to-process").unwrap());
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
// A valid report
let valid_report_1 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&cred.bucket, date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// Report which reuses this nonce
let invalid_report_1 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&cred.bucket, date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// This is the same report
assert_eq!(valid_report_1, invalid_report_1);
// Report which reuses this nonce for a different bridge
let invalid_report_2 = NegativeReport::new(
bridges[1].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&cred.bucket, date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
// Report which uses this nonce but on a different day
let valid_report_2 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&cred.bucket, date - 1, nonce)),
"ru".to_string(),
date - 1,
nonce,
BridgeDistributor::Lox,
);
// Report with different nonce
let mut nonce = [0; 32];
rng.fill_bytes(&mut nonce);
let valid_report_3 = NegativeReport::new(
bridges[0].fingerprint,
ProofOfBridgeKnowledge::HashOfBucket(HashOfBucket::new(&cred.bucket, date, nonce)),
"ru".to_string(),
date,
nonce,
BridgeDistributor::Lox,
);
let map_key_1 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", valid_report_1.fingerprint),
"ru".to_string(),
date
);
save_negative_report_to_process(&db, valid_report_1);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 1);
save_negative_report_to_process(&db, invalid_report_1); // no change
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 1);
let map_key_2 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", invalid_report_2.fingerprint),
"ru".to_string(),
date
);
save_negative_report_to_process(&db, invalid_report_2); // no change
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
assert!(!nrs_to_process.contains_key(&map_key_2));
let map_key_3 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", valid_report_2.fingerprint),
"ru".to_string(),
date - 1
);
save_negative_report_to_process(&db, valid_report_2);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_3).unwrap();
assert_eq!(negative_reports.len(), 1);
save_negative_report_to_process(&db, valid_report_3);
let nrs_to_process: BTreeMap<String, Vec<SerializableNegativeReport>> =
bincode::deserialize(&db.get("nrs-to-process").unwrap().unwrap()).unwrap();
let negative_reports = nrs_to_process.get(&map_key_1).unwrap();
assert_eq!(negative_reports.len(), 2);
}
#[test]
@@ -402,4 +700,76 @@ fn test_positive_reports() {
assert!(invalid_report_4.to_report().is_err());
assert!(invalid_report_5.to_report().is_err());
// Test storing to-be-processed positive reports to database
// Create reports
let report_1 = PositiveReport::from_lox_credential(
bridges[0].fingerprint,
None,
&cred,
&th.ba.lox_pub,
"ru".to_string(),
)
.unwrap();
let report_2 = PositiveReport::from_lox_credential(
bridges[0].fingerprint,
None,
&cred,
&th.ba.lox_pub,
"ru".to_string(),
)
.unwrap();
let report_3 = PositiveReport::from_lox_credential(
bridges[1].fingerprint,
None,
&cred,
&th.ba.lox_pub,
"ru".to_string(),
)
.unwrap();
// Open test database
let db: Db = sled::open("test_db").unwrap();
// Delete all data in test DB
db.clear().unwrap();
assert!(!db.contains_key("prs-to-process").unwrap());
let map_key_1 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", report_1.fingerprint),
&report_1.country,
&report_1.date
);
let map_key_2 = format!(
"{}_{}_{}",
array_bytes::bytes2hex("", report_3.fingerprint),
&report_3.country,
&report_3.date
);
save_positive_report_to_process(&db, report_1);
let prs_to_process: BTreeMap<String, Vec<SerializablePositiveReport>> =
bincode::deserialize(&db.get("prs-to-process").unwrap().unwrap()).unwrap();
let positive_reports = prs_to_process.get(&map_key_1).unwrap();
assert_eq!(positive_reports.len(), 1);
assert!(!prs_to_process.contains_key(&map_key_2));
save_positive_report_to_process(&db, report_2);
let prs_to_process: BTreeMap<String, Vec<SerializablePositiveReport>> =
bincode::deserialize(&db.get("prs-to-process").unwrap().unwrap()).unwrap();
let positive_reports = prs_to_process.get(&map_key_1).unwrap();
assert_eq!(positive_reports.len(), 2);
assert!(!prs_to_process.contains_key(&map_key_2));
save_positive_report_to_process(&db, report_3);
let prs_to_process: BTreeMap<String, Vec<SerializablePositiveReport>> =
bincode::deserialize(&db.get("prs-to-process").unwrap().unwrap()).unwrap();
// Check that this has not changed
let positive_reports = prs_to_process.get(&map_key_1).unwrap();
assert_eq!(positive_reports.len(), 2);
// New report added to its own collection
let positive_reports = prs_to_process.get(&map_key_2).unwrap();
assert_eq!(positive_reports.len(), 1);
}