Compare commits
4 Commits
58ab310a40
...
1ccd676e5c
Author | SHA1 | Date |
---|---|---|
|
1ccd676e5c | |
|
12e699f979 | |
|
3ef9c04141 | |
|
8ea6940397 |
|
@ -12,17 +12,19 @@ chrono = "0.4"
|
|||
clap = { version = "4.4.14", features = ["derive"] }
|
||||
curve25519-dalek = { version = "4", default-features = false, features = ["serde", "rand_core", "digest"] }
|
||||
ed25519-dalek = { version = "2", features = ["serde", "rand_core"] }
|
||||
futures = "0.3.30"
|
||||
http = "1"
|
||||
http-body-util = "0.1"
|
||||
hyper = { version = "1", features = ["full"] }
|
||||
hyper = { version = "0.14.28", features = ["full"] }
|
||||
hyper-rustls = "0.26.0"
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
julianday = "1.2.0"
|
||||
lazy_static = "1"
|
||||
lox-library = { git = "https://gitlab.torproject.org/vecna/lox.git", version = "0.1.0" }
|
||||
select = "0.6.0"
|
||||
serde = "1.0.195"
|
||||
serde = "1.0.197"
|
||||
serde_json = "1.0"
|
||||
serde_with = {version = "3.5.0", features = ["json"]}
|
||||
serde_with = {version = "3.7.0", features = ["json"]}
|
||||
sha1 = "0.10"
|
||||
sha3 = "0.10"
|
||||
sled = "0.34.7"
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"db": {
|
||||
"db_path": "server_db"
|
||||
|
||||
},
|
||||
"distributors": {
|
||||
"Lox": "127.0.0.1:8002"
|
||||
},
|
||||
"port": 8003,
|
||||
"require_bridge_token": false
|
||||
}
|
|
@ -0,0 +1,22 @@
|
|||
use crate::BridgeInfo;
|
||||
use std::collections::HashSet;
|
||||
|
||||
/// Provides a function for predicting which countries block this bridge
|
||||
pub trait Analyzer {
|
||||
fn blocked_in(&self, bridge_info: &BridgeInfo) -> HashSet<String>;
|
||||
}
|
||||
|
||||
pub struct ExampleAnalyzer {}
|
||||
|
||||
/// Dummy example which just tells us about blockages we already know about
|
||||
impl Analyzer for ExampleAnalyzer {
|
||||
fn blocked_in(&self, bridge_info: &BridgeInfo) -> HashSet<String> {
|
||||
let mut blocked_in = HashSet::<String>::new();
|
||||
for (country, info) in &bridge_info.info_by_country {
|
||||
if info.blocked {
|
||||
blocked_in.insert(country.to_string());
|
||||
}
|
||||
}
|
||||
blocked_in
|
||||
}
|
||||
}
|
|
@ -1,49 +1,180 @@
|
|||
use troll_patrol::{
|
||||
extra_info::{self, ExtraInfo},
|
||||
//negative_report::SerializableNegativeReport,
|
||||
//positive_report::SerializablePositiveReport,
|
||||
*,
|
||||
};
|
||||
use troll_patrol::{request_handler::handle, *};
|
||||
|
||||
use clap::Parser;
|
||||
use futures::future;
|
||||
use hyper::{
|
||||
server::conn::AddrStream,
|
||||
service::{make_service_fn, service_fn},
|
||||
Body, Request, Response, Server,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use sled::Db;
|
||||
use std::{collections::HashSet, path::PathBuf};
|
||||
use std::{
|
||||
collections::BTreeMap, convert::Infallible, fs::File, io::BufReader, net::SocketAddr,
|
||||
path::PathBuf, time::Duration,
|
||||
};
|
||||
use tokio::{
|
||||
signal, spawn,
|
||||
sync::{broadcast, mpsc, oneshot},
|
||||
time::sleep,
|
||||
};
|
||||
|
||||
async fn shutdown_signal() {
|
||||
tokio::signal::ctrl_c()
|
||||
.await
|
||||
.expect("failed to listen for ctrl+c signal");
|
||||
println!("Shut down Troll Patrol Server");
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
struct Args {
|
||||
/// Name/path of the configuration file
|
||||
#[arg(short, long, default_value = "config.json")]
|
||||
config: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Config {
|
||||
pub db: DbConfig,
|
||||
// map of distributor name to IP:port to contact it
|
||||
pub distributors: BTreeMap<BridgeDistributor, String>,
|
||||
//require_bridge_token: bool,
|
||||
port: u16,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct DbConfig {
|
||||
// The path for the server database, default is "server_db"
|
||||
pub db_path: String,
|
||||
}
|
||||
|
||||
impl Default for DbConfig {
|
||||
fn default() -> DbConfig {
|
||||
DbConfig {
|
||||
db_path: "server_db".to_owned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_daily_info(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
|
||||
update_extra_infos(&db).await;
|
||||
update_negative_reports(&db, &distributors).await;
|
||||
update_positive_reports(&db, &distributors).await;
|
||||
let new_blockages = guess_blockages(&db, &analyzer::ExampleAnalyzer {});
|
||||
report_blockages(&distributors, new_blockages).await;
|
||||
}
|
||||
|
||||
async fn create_context_manager(
|
||||
db_config: DbConfig,
|
||||
context_rx: mpsc::Receiver<Command>,
|
||||
mut kill: broadcast::Receiver<()>,
|
||||
) {
|
||||
tokio::select! {
|
||||
create_context = context_manager(db_config, context_rx) => create_context,
|
||||
_ = kill.recv() => {println!("Shut down manager");},
|
||||
}
|
||||
}
|
||||
|
||||
async fn context_manager(db_config: DbConfig, mut context_rx: mpsc::Receiver<Command>) {
|
||||
let db: Db = sled::open(&db_config.db_path).unwrap();
|
||||
|
||||
while let Some(cmd) = context_rx.recv().await {
|
||||
use Command::*;
|
||||
match cmd {
|
||||
Request { req, sender } => {
|
||||
let response = handle(&db, req).await;
|
||||
if let Err(e) = sender.send(response) {
|
||||
eprintln!("Server Response Error: {:?}", e);
|
||||
};
|
||||
sleep(Duration::from_millis(1)).await;
|
||||
}
|
||||
Shutdown { shutdown_sig } => {
|
||||
println!("Sending Shutdown Signal, all threads should shutdown.");
|
||||
drop(shutdown_sig);
|
||||
println!("Shutdown Sent.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Each of the commands that can be handled
|
||||
#[derive(Debug)]
|
||||
enum Command {
|
||||
Request {
|
||||
req: Request<Body>,
|
||||
sender: oneshot::Sender<Result<Response<Body>, Infallible>>,
|
||||
},
|
||||
Shutdown {
|
||||
shutdown_sig: broadcast::Sender<()>,
|
||||
},
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
// TODO: Currently, we're processing extra-infos here, but we want to:
|
||||
// 1. Run a server to accept incoming reports
|
||||
// 2. Periodically (daily):
|
||||
// a) download new extra-infos
|
||||
// b) determine whether we think each bridge is blocked or not
|
||||
// c) report these results to the LA
|
||||
// 3. Store all our data
|
||||
|
||||
let db: Db = sled::open(&CONFIG.db.db_path).unwrap();
|
||||
let args: Args = Args::parse();
|
||||
|
||||
// Track which files have been processed. This is slightly redundant
|
||||
// because we're only downloading files we don't already have, but it
|
||||
// might be a good idea to check in case we downloaded a file but didn't
|
||||
// process it for some reason.
|
||||
let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => HashSet::<String>::new(),
|
||||
};
|
||||
let config: Config = serde_json::from_reader(BufReader::new(
|
||||
File::open(&args.config).expect("Could not read config file"),
|
||||
))
|
||||
.expect("Reading config file from JSON failed");
|
||||
|
||||
let new_files = extra_info::download_extra_infos().await.unwrap();
|
||||
let (request_tx, request_rx) = mpsc::channel(32);
|
||||
|
||||
let mut new_extra_infos = HashSet::<ExtraInfo>::new();
|
||||
let shutdown_cmd_tx = request_tx.clone();
|
||||
|
||||
// Make set of new extra-infos
|
||||
for extra_info_file in &new_files {
|
||||
extra_info::add_extra_infos(&extra_info_file, &mut new_extra_infos);
|
||||
processed_extra_infos_files.insert(extra_info_file.to_string());
|
||||
// create the shutdown broadcast channel and clone for every thread
|
||||
let (shutdown_tx, mut shutdown_rx) = broadcast::channel(16);
|
||||
let kill = shutdown_tx.subscribe();
|
||||
|
||||
// Listen for ctrl_c, send signal to broadcast shutdown to all threads by dropping shutdown_tx
|
||||
let shutdown_handler = spawn(async move {
|
||||
tokio::select! {
|
||||
_ = signal::ctrl_c() => {
|
||||
let cmd = Command::Shutdown {
|
||||
shutdown_sig: shutdown_tx,
|
||||
};
|
||||
shutdown_cmd_tx.send(cmd).await.unwrap();
|
||||
sleep(Duration::from_secs(1)).await;
|
||||
|
||||
_ = shutdown_rx.recv().await;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let context_manager =
|
||||
spawn(async move { create_context_manager(config.db, request_rx, kill).await });
|
||||
|
||||
let make_service = make_service_fn(move |_conn: &AddrStream| {
|
||||
let request_tx = request_tx.clone();
|
||||
let service = service_fn(move |req| {
|
||||
let request_tx = request_tx.clone();
|
||||
let (response_tx, response_rx) = oneshot::channel();
|
||||
let cmd = Command::Request {
|
||||
req,
|
||||
sender: response_tx,
|
||||
};
|
||||
async move {
|
||||
request_tx.send(cmd).await.unwrap();
|
||||
response_rx.await.unwrap()
|
||||
}
|
||||
});
|
||||
async move { Ok::<_, Infallible>(service) }
|
||||
});
|
||||
|
||||
let addr = SocketAddr::from(([0, 0, 0, 0], config.port));
|
||||
let server = Server::bind(&addr).serve(make_service);
|
||||
let graceful = server.with_graceful_shutdown(shutdown_signal());
|
||||
println!("Listening on {}", addr);
|
||||
if let Err(e) = graceful.await {
|
||||
eprintln!("server error: {}", e);
|
||||
}
|
||||
|
||||
// Add new extra-infos data to database
|
||||
for extra_info in new_extra_infos {
|
||||
add_extra_info_to_db(&db, extra_info);
|
||||
}
|
||||
|
||||
db.insert(b"extra_infos_files", bincode::serialize(&processed_extra_infos_files).unwrap()).unwrap();
|
||||
future::join_all([context_manager, shutdown_handler]).await;
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ Note, this is NOT a complete implementation of the document format.
|
|||
(https://spec.torproject.org/dir-spec/extra-info-document-format.html) */
|
||||
|
||||
use chrono::DateTime;
|
||||
use http::status::StatusCode;
|
||||
use http_body_util::{BodyExt, Empty};
|
||||
use hyper::body::Bytes;
|
||||
use hyper_util::{client::legacy::Client, rt::TokioExecutor};
|
||||
|
@ -164,7 +165,7 @@ pub async fn download_extra_infos(
|
|||
println!("Downloading {}", base_url);
|
||||
let mut res = client.get(url).await?;
|
||||
|
||||
assert_eq!(res.status(), hyper::StatusCode::OK);
|
||||
assert_eq!(res.status(), StatusCode::OK);
|
||||
let mut body_str = String::from("");
|
||||
while let Some(next) = res.frame().await {
|
||||
let frame = next?;
|
||||
|
@ -191,7 +192,7 @@ pub async fn download_extra_infos(
|
|||
let extra_infos_url = format!("{}{}", base_url, link);
|
||||
println!("Downloading {}", extra_infos_url);
|
||||
let mut res = client.get(extra_infos_url.parse().unwrap()).await?;
|
||||
assert_eq!(res.status(), hyper::StatusCode::OK);
|
||||
assert_eq!(res.status(), StatusCode::OK);
|
||||
let mut file = std::fs::File::create(filename).unwrap();
|
||||
while let Some(next) = res.frame().await {
|
||||
let frame = next?;
|
||||
|
|
528
src/lib.rs
528
src/lib.rs
|
@ -1,53 +1,28 @@
|
|||
use hyper::{Body, Client, Method, Request};
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sled::Db;
|
||||
use std::{
|
||||
collections::{BTreeMap, HashMap, HashSet},
|
||||
fmt,
|
||||
fs::File,
|
||||
io::BufReader,
|
||||
};
|
||||
|
||||
pub mod analyzer;
|
||||
pub mod bridge_verification_info;
|
||||
pub mod extra_info;
|
||||
pub mod negative_report;
|
||||
pub mod positive_report;
|
||||
pub mod request_handler;
|
||||
|
||||
use analyzer::Analyzer;
|
||||
use extra_info::*;
|
||||
use negative_report::*;
|
||||
use positive_report::*;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Config {
|
||||
pub db: DbConfig,
|
||||
//require_bridge_token: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct DbConfig {
|
||||
// The path for the server database, default is "server_db"
|
||||
pub db_path: String,
|
||||
}
|
||||
|
||||
impl Default for DbConfig {
|
||||
fn default() -> DbConfig {
|
||||
DbConfig {
|
||||
db_path: "server_db".to_owned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
// known country codes based on Tor geoIP database
|
||||
// Produced with `cat /usr/share/tor/geoip{,6} | grep -v ^# | grep -o ..$ | sort | uniq | tr '[:upper:]' '[:lower:]' | tr '\n' ',' | sed 's/,/","/g'`
|
||||
pub static ref COUNTRY_CODES: HashSet<&'static str> = HashSet::from(["??","ad","ae","af","ag","ai","al","am","ao","ap","aq","ar","as","at","au","aw","ax","az","ba","bb","bd","be","bf","bg","bh","bi","bj","bl","bm","bn","bo","bq","br","bs","bt","bv","bw","by","bz","ca","cc","cd","cf","cg","ch","ci","ck","cl","cm","cn","co","cr","cs","cu","cv","cw","cx","cy","cz","de","dj","dk","dm","do","dz","ec","ee","eg","eh","er","es","et","eu","fi","fj","fk","fm","fo","fr","ga","gb","gd","ge","gf","gg","gh","gi","gl","gm","gn","gp","gq","gr","gs","gt","gu","gw","gy","hk","hm","hn","hr","ht","hu","id","ie","il","im","in","io","iq","ir","is","it","je","jm","jo","jp","ke","kg","kh","ki","km","kn","kp","kr","kw","ky","kz","la","lb","lc","li","lk","lr","ls","lt","lu","lv","ly","ma","mc","md","me","mf","mg","mh","mk","ml","mm","mn","mo","mp","mq","mr","ms","mt","mu","mv","mw","mx","my","mz","na","nc","ne","nf","ng","ni","nl","no","np","nr","nu","nz","om","pa","pe","pf","pg","ph","pk","pl","pm","pn","pr","ps","pt","pw","py","qa","re","ro","rs","ru","rw","sa","sb","sc","sd","se","sg","sh","si","sj","sk","sl","sm","sn","so","sr","ss","st","sv","sx","sy","sz","tc","td","tf","tg","th","tj","tk","tl","tm","tn","to","tr","tt","tv","tw","tz","ua","ug","um","us","uy","uz","va","vc","ve","vg","vi","vn","vu","wf","ws","ye","yt","za","zm","zw"]);
|
||||
|
||||
// read config data at run time
|
||||
pub static ref CONFIG: Config = serde_json::from_reader(
|
||||
BufReader::new(
|
||||
File::open("config.json").expect("Could not read config file") // TODO: Make config filename configurable
|
||||
)
|
||||
).expect("Reading config file from JSON failed");
|
||||
}
|
||||
|
||||
/// Get Julian date
|
||||
|
@ -59,7 +34,7 @@ pub fn get_date() -> u32 {
|
|||
.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
pub enum BridgeDistributor {
|
||||
Lox,
|
||||
}
|
||||
|
@ -76,11 +51,8 @@ pub struct BridgeInfo {
|
|||
/// first Julian date we started collecting data on this bridge
|
||||
pub first_seen: u32,
|
||||
|
||||
/// flag indicating whether the bridge is believed to be blocked
|
||||
pub is_blocked: bool,
|
||||
|
||||
/// map of dates to data for that day
|
||||
pub info_by_day: HashMap<u32, DailyBridgeInfo>,
|
||||
/// map of countries to data for this bridge in that country
|
||||
pub info_by_country: HashMap<String, BridgeCountryInfo>,
|
||||
}
|
||||
|
||||
impl BridgeInfo {
|
||||
|
@ -89,8 +61,7 @@ impl BridgeInfo {
|
|||
fingerprint: fingerprint,
|
||||
nickname: nickname.to_string(),
|
||||
first_seen: get_date(),
|
||||
is_blocked: false,
|
||||
info_by_day: HashMap::<u32, DailyBridgeInfo>::new(),
|
||||
info_by_country: HashMap::<String, BridgeCountryInfo>::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -103,12 +74,11 @@ impl fmt::Display for BridgeInfo {
|
|||
);
|
||||
str.push_str(format!("nickname: {}\n", self.nickname).as_str());
|
||||
str.push_str(format!("first_seen: {}\n", self.first_seen).as_str());
|
||||
str.push_str(format!("is_blocked: {}\n", self.is_blocked).as_str());
|
||||
str.push_str("info_by_day:");
|
||||
for day in self.info_by_day.keys() {
|
||||
str.push_str(format!("\n day: {}", day).as_str());
|
||||
let daily_info = self.info_by_day.get(day).unwrap();
|
||||
for line in daily_info.to_string().lines() {
|
||||
str.push_str("info_by_country:");
|
||||
for country in self.info_by_country.keys() {
|
||||
str.push_str(format!("\n country: {}", country).as_str());
|
||||
let country_info = self.info_by_country.get(country).unwrap();
|
||||
for line in country_info.to_string().lines() {
|
||||
str.push_str(format!("\n {}", line).as_str());
|
||||
}
|
||||
}
|
||||
|
@ -123,62 +93,49 @@ pub enum BridgeInfoType {
|
|||
PositiveReports,
|
||||
}
|
||||
|
||||
/// Information about bridge reachability, gathered daily
|
||||
/// Information about bridge reachability from a given country
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct DailyBridgeInfo {
|
||||
pub info_by_country: BTreeMap<String, BTreeMap<BridgeInfoType, u32>>,
|
||||
pub struct BridgeCountryInfo {
|
||||
pub info_by_day: BTreeMap<u32, BTreeMap<BridgeInfoType, u32>>,
|
||||
pub blocked: bool,
|
||||
}
|
||||
|
||||
impl DailyBridgeInfo {
|
||||
impl BridgeCountryInfo {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
info_by_country: BTreeMap::<String, BTreeMap<BridgeInfoType, u32>>::new(),
|
||||
info_by_day: BTreeMap::<u32, BTreeMap<BridgeInfoType, u32>>::new(),
|
||||
blocked: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_info(
|
||||
&mut self,
|
||||
info_type: BridgeInfoType,
|
||||
count_per_country: &BTreeMap<String, u32>,
|
||||
) {
|
||||
for country in count_per_country.keys() {
|
||||
if self.info_by_country.contains_key(country) {
|
||||
let info = self.info_by_country.get_mut(country).unwrap();
|
||||
if !info.contains_key(&info_type) {
|
||||
info.insert(
|
||||
info_type,
|
||||
*count_per_country.get(&country.to_string()).unwrap(),
|
||||
);
|
||||
} else if info_type == BridgeInfoType::BridgeIps {
|
||||
// Use newest value we've seen today
|
||||
if info.get(&info_type).unwrap() < count_per_country.get(country).unwrap() {
|
||||
info.insert(
|
||||
BridgeInfoType::BridgeIps,
|
||||
*count_per_country.get(&country.to_string()).unwrap(),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
let new_count = info.get(&info_type).unwrap()
|
||||
+ *count_per_country.get(&country.to_string()).unwrap();
|
||||
info.insert(info_type, new_count);
|
||||
pub fn add_info(&mut self, info_type: BridgeInfoType, date: u32, count: u32) {
|
||||
if self.info_by_day.contains_key(&date) {
|
||||
let info = self.info_by_day.get_mut(&date).unwrap();
|
||||
if !info.contains_key(&info_type) {
|
||||
info.insert(info_type, count);
|
||||
} else if info_type == BridgeInfoType::BridgeIps {
|
||||
if *info.get(&info_type).unwrap() < count {
|
||||
// Use highest value we've seen today
|
||||
info.insert(info_type, count);
|
||||
}
|
||||
} else {
|
||||
let mut info = BTreeMap::<BridgeInfoType, u32>::new();
|
||||
info.insert(
|
||||
info_type,
|
||||
*count_per_country.get(&country.to_string()).unwrap(),
|
||||
);
|
||||
self.info_by_country.insert(country.to_string(), info);
|
||||
// Add count to previous count for reports
|
||||
let new_count = info.get(&info_type).unwrap() + count;
|
||||
info.insert(info_type, new_count);
|
||||
}
|
||||
} else {
|
||||
let mut info = BTreeMap::<BridgeInfoType, u32>::new();
|
||||
info.insert(info_type, count);
|
||||
self.info_by_day.insert(date, info);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for DailyBridgeInfo {
|
||||
impl fmt::Display for BridgeCountryInfo {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let mut str = String::from("info:");
|
||||
for country in self.info_by_country.keys() {
|
||||
let info = self.info_by_country.get(country).unwrap();
|
||||
for date in self.info_by_day.keys() {
|
||||
let info = self.info_by_day.get(date).unwrap();
|
||||
let ip_count = match info.get(&BridgeInfoType::BridgeIps) {
|
||||
Some(v) => v,
|
||||
None => &0,
|
||||
|
@ -194,8 +151,8 @@ impl fmt::Display for DailyBridgeInfo {
|
|||
if ip_count > &0 || nr_count > &0 || pr_count > &0 {
|
||||
str.push_str(
|
||||
format!(
|
||||
"\n cc: {}\n connections: {}\n negative reports: {}\n positive reports: {}",
|
||||
country,
|
||||
"\n date: {}\n connections: {}\n negative reports: {}\n positive reports: {}",
|
||||
date,
|
||||
ip_count,
|
||||
nr_count,
|
||||
pr_count,
|
||||
|
@ -208,32 +165,399 @@ impl fmt::Display for DailyBridgeInfo {
|
|||
}
|
||||
}
|
||||
|
||||
// Process extra-infos
|
||||
|
||||
/// Adds the extra-info data for a single bridge to the database. If the
|
||||
/// database already contains an extra-info for this bridge for thid date,
|
||||
/// but this extra-info contains different data for some reason, use the
|
||||
/// greater count of connections from each country.
|
||||
pub fn add_extra_info_to_db(db: &Db, extra_info: ExtraInfo) {
|
||||
let fingerprint = extra_info.fingerprint;
|
||||
let mut bridge_info = match db.get(&fingerprint).unwrap() {
|
||||
let mut bridges = match db.get("bridges").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BridgeInfo::new(fingerprint, &extra_info.nickname),
|
||||
None => BTreeMap::<[u8; 20], BridgeInfo>::new(),
|
||||
};
|
||||
// If we already have an entry, compare it with the new one. For each
|
||||
// country:count mapping, use the greater of the two counts.
|
||||
if bridge_info.info_by_day.contains_key(&extra_info.date) {
|
||||
let daily_bridge_info = bridge_info.info_by_day.get_mut(&extra_info.date).unwrap();
|
||||
daily_bridge_info.add_info(BridgeInfoType::BridgeIps, &extra_info.bridge_ips);
|
||||
} else {
|
||||
// No existing entry; make a new one.
|
||||
let mut daily_bridge_info = DailyBridgeInfo {
|
||||
info_by_country: BTreeMap::<String, BTreeMap<BridgeInfoType, u32>>::new(),
|
||||
};
|
||||
daily_bridge_info.add_info(BridgeInfoType::BridgeIps, &extra_info.bridge_ips);
|
||||
bridge_info
|
||||
.info_by_day
|
||||
.insert(extra_info.date, daily_bridge_info);
|
||||
let fingerprint = extra_info.fingerprint;
|
||||
if !bridges.contains_key(&fingerprint) {
|
||||
bridges.insert(
|
||||
fingerprint,
|
||||
BridgeInfo::new(fingerprint, &extra_info.nickname),
|
||||
);
|
||||
}
|
||||
let bridge_info = bridges.get_mut(&fingerprint).unwrap();
|
||||
for country in extra_info.bridge_ips.keys() {
|
||||
if bridge_info.info_by_country.contains_key::<String>(country) {
|
||||
bridge_info
|
||||
.info_by_country
|
||||
.get_mut(country)
|
||||
.unwrap()
|
||||
.add_info(
|
||||
BridgeInfoType::BridgeIps,
|
||||
extra_info.date,
|
||||
*extra_info.bridge_ips.get(country).unwrap(),
|
||||
);
|
||||
} else {
|
||||
// No existing entry; make a new one.
|
||||
let mut bridge_country_info = BridgeCountryInfo::new();
|
||||
bridge_country_info.add_info(
|
||||
BridgeInfoType::BridgeIps,
|
||||
extra_info.date,
|
||||
*extra_info.bridge_ips.get(country).unwrap(),
|
||||
);
|
||||
bridge_info
|
||||
.info_by_country
|
||||
.insert(country.to_string(), bridge_country_info);
|
||||
}
|
||||
}
|
||||
// Commit changes to database
|
||||
db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
|
||||
db.insert("bridges", bincode::serialize(&bridges).unwrap())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Download new extra-infos files and add their data to the database
|
||||
pub async fn update_extra_infos(db: &Db) {
|
||||
// Track which files have been processed. This is slightly redundant
|
||||
// because we're only downloading files we don't already have, but it
|
||||
// might be a good idea to check in case we downloaded a file but didn't
|
||||
// process it for some reason.
|
||||
let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => HashSet::<String>::new(),
|
||||
};
|
||||
|
||||
let new_files = extra_info::download_extra_infos().await.unwrap();
|
||||
|
||||
let mut new_extra_infos = HashSet::<ExtraInfo>::new();
|
||||
|
||||
// Make set of new extra-infos
|
||||
for extra_info_file in &new_files {
|
||||
extra_info::add_extra_infos(&extra_info_file, &mut new_extra_infos);
|
||||
processed_extra_infos_files.insert(extra_info_file.to_string());
|
||||
}
|
||||
|
||||
// Add new extra-infos data to database
|
||||
for extra_info in new_extra_infos {
|
||||
add_extra_info_to_db(&db, extra_info);
|
||||
}
|
||||
|
||||
db.insert(
|
||||
b"extra_infos_files",
|
||||
bincode::serialize(&processed_extra_infos_files).unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Process negative reports
|
||||
|
||||
/// Negative reports can be deduplicated, so we store to-be-processed
|
||||
/// negative reports as a map of [report] to [count of report]. Add this
|
||||
/// NR to that map (or create a new map if necessary).
|
||||
pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
|
||||
// We serialize the negative reports as strings to use them as map keys.
|
||||
let mut reports = match db.get("nrs-to-process").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BTreeMap::<String, BTreeMap<String, u32>>::new(),
|
||||
};
|
||||
// Store to-be-processed reports with key [fingerprint]_[country]_[date]
|
||||
let map_key = format!(
|
||||
"{}_{}_{}",
|
||||
array_bytes::bytes2hex("", &nr.fingerprint),
|
||||
&nr.country,
|
||||
&nr.date,
|
||||
);
|
||||
let serialized_nr = nr.to_json();
|
||||
if reports.contains_key(&map_key) {
|
||||
let nr_map = reports.get_mut(&map_key).unwrap();
|
||||
if nr_map.contains_key(&serialized_nr) {
|
||||
let prev_count = nr_map.get(&serialized_nr).unwrap();
|
||||
nr_map.insert(serialized_nr, prev_count + 1);
|
||||
} else {
|
||||
nr_map.insert(serialized_nr, 1);
|
||||
}
|
||||
} else {
|
||||
let mut nr_map = BTreeMap::<String, u32>::new();
|
||||
nr_map.insert(serialized_nr, 1);
|
||||
reports.insert(map_key, nr_map);
|
||||
}
|
||||
// Commit changes to database
|
||||
db.insert("nrs-to-process", bincode::serialize(&reports).unwrap())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Sends a collection of negative reports to the Lox Authority and returns the
|
||||
/// number of valid reports returned by the server. The negative reports in the
|
||||
/// collection should all have the same bridge fingerprint, date, country, and
|
||||
/// distributor.
|
||||
pub async fn verify_negative_reports(
|
||||
distributors: &BTreeMap<BridgeDistributor, String>,
|
||||
reports: &BTreeMap<String, u32>,
|
||||
) -> u32 {
|
||||
// Don't make a network call if we don't have any reports anyway
|
||||
if reports.is_empty() {
|
||||
return 0;
|
||||
}
|
||||
// Get one report, assume the rest have the same distributor
|
||||
let first_report: SerializableNegativeReport =
|
||||
serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
|
||||
let distributor = first_report.distributor;
|
||||
let client = Client::new();
|
||||
let uri: String = (distributors.get(&distributor).unwrap().to_owned() + "/verifynegative")
|
||||
.parse()
|
||||
.unwrap();
|
||||
let req = Request::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.body(Body::from(serde_json::to_string(&reports).unwrap()))
|
||||
.unwrap();
|
||||
let resp = client.request(req).await.unwrap();
|
||||
let buf = hyper::body::to_bytes(resp).await.unwrap();
|
||||
serde_json::from_slice(&buf).unwrap()
|
||||
}
|
||||
|
||||
/// Process today's negative reports and store the count of verified reports in
|
||||
/// the database.
|
||||
pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
|
||||
let mut all_negative_reports = match db.get("nrs-to-process").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BTreeMap::<String, BTreeMap<String, u32>>::new(),
|
||||
};
|
||||
// Key is [fingerprint]_[country]_[date]
|
||||
for bridge_country_date in all_negative_reports.keys() {
|
||||
let reports = all_negative_reports.get(bridge_country_date).unwrap();
|
||||
if !reports.is_empty() {
|
||||
let first_report: SerializableNegativeReport =
|
||||
serde_json::from_str(reports.first_key_value().unwrap().0).unwrap();
|
||||
let fingerprint = first_report.fingerprint;
|
||||
let date = first_report.date;
|
||||
let country = first_report.country;
|
||||
let count_valid = verify_negative_reports(&distributors, reports).await;
|
||||
|
||||
let mut bridges = match db.get("bridges").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BTreeMap::<[u8; 20], BridgeInfo>::new(),
|
||||
};
|
||||
|
||||
// Get bridge info or make new one
|
||||
if !bridges.contains_key(&fingerprint) {
|
||||
// This case shouldn't happen unless the bridge hasn't published
|
||||
// any bridge stats.
|
||||
bridges.insert(fingerprint, BridgeInfo::new(fingerprint, &"".to_string()));
|
||||
}
|
||||
let bridge_info = bridges.get_mut(&fingerprint).unwrap();
|
||||
|
||||
// Add the new report count to it
|
||||
if bridge_info.info_by_country.contains_key(&country) {
|
||||
let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
|
||||
bridge_country_info.add_info(BridgeInfoType::NegativeReports, date, count_valid);
|
||||
} else {
|
||||
// No existing entry; make a new one.
|
||||
let mut bridge_country_info = BridgeCountryInfo::new();
|
||||
bridge_country_info.add_info(BridgeInfoType::NegativeReports, date, count_valid);
|
||||
bridge_info
|
||||
.info_by_country
|
||||
.insert(country, bridge_country_info);
|
||||
}
|
||||
|
||||
// Commit changes to database
|
||||
db.insert("bridges", bincode::serialize(&bridges).unwrap())
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
// TODO: Would it be cheaper to just recreate it?
|
||||
all_negative_reports.clear();
|
||||
// Remove the now-processed reports from the database
|
||||
db.insert(
|
||||
"nrs-to-process",
|
||||
bincode::serialize(&all_negative_reports).unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
// Process positive reports
|
||||
|
||||
/// We store to-be-processed positive reports as a vector. Add this PR
|
||||
/// to that vector (or create a new vector if necessary).
|
||||
pub fn save_positive_report_to_process(db: &Db, pr: PositiveReport) {
|
||||
let mut reports = match db.get("prs-to-process").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BTreeMap::<String, Vec<SerializablePositiveReport>>::new(),
|
||||
};
|
||||
// Store to-be-processed reports with key [fingerprint]_[country]_[date]
|
||||
let map_key = format!(
|
||||
"{}_{}_{}",
|
||||
array_bytes::bytes2hex("", &pr.fingerprint),
|
||||
&pr.country,
|
||||
&pr.date,
|
||||
);
|
||||
if reports.contains_key(&map_key) {
|
||||
reports
|
||||
.get_mut(&map_key)
|
||||
.unwrap()
|
||||
.push(pr.to_serializable_report());
|
||||
} else {
|
||||
let mut prs = Vec::<SerializablePositiveReport>::new();
|
||||
prs.push(pr.to_serializable_report());
|
||||
reports.insert(map_key, prs);
|
||||
}
|
||||
// Commit changes to database
|
||||
db.insert("prs-to-process", bincode::serialize(&reports).unwrap())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Sends a collection of positive reports to the Lox Authority and returns the
|
||||
/// number of valid reports returned by the server. The positive reports in the
|
||||
/// collection should all have the same bridge fingerprint, date, and country.
|
||||
pub async fn verify_positive_reports(
|
||||
distributors: &BTreeMap<BridgeDistributor, String>,
|
||||
reports: &Vec<SerializablePositiveReport>,
|
||||
) -> u32 {
|
||||
// Don't make a network call if we don't have any reports anyway
|
||||
if reports.is_empty() {
|
||||
return 0;
|
||||
}
|
||||
let client = Client::new();
|
||||
let uri: String = (distributors
|
||||
.get(&BridgeDistributor::Lox)
|
||||
.unwrap()
|
||||
.to_owned()
|
||||
+ "/verifypositive")
|
||||
.parse()
|
||||
.unwrap();
|
||||
let req = Request::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.body(Body::from(serde_json::to_string(&reports).unwrap()))
|
||||
.unwrap();
|
||||
let resp = client.request(req).await.unwrap();
|
||||
let buf = hyper::body::to_bytes(resp).await.unwrap();
|
||||
serde_json::from_slice(&buf).unwrap()
|
||||
}
|
||||
|
||||
/// Process today's positive reports and store the count of verified reports in
/// the database.
///
/// Reads the pending reports from the "prs-to-process" db key (grouped under
/// `[fingerprint]_[country]_[date]` keys), asks the Lox Authority how many of
/// each group are valid, records the count in the per-bridge/per-country info,
/// and finally clears the pending-report queue.
pub async fn update_positive_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
    // Pending reports, keyed by [fingerprint]_[country]_[date]. Missing db
    // entry means there is nothing queued yet.
    let mut all_positive_reports = match db.get("prs-to-process").unwrap() {
        Some(v) => bincode::deserialize(&v).unwrap(),
        None => BTreeMap::<String, Vec<SerializablePositiveReport>>::new(),
    };
    // Key is [fingerprint]_[country]_[date]
    for bridge_country_date in all_positive_reports.keys() {
        let reports = all_positive_reports.get(bridge_country_date).unwrap();
        if !reports.is_empty() {
            // All reports in a group share fingerprint/date/country, so the
            // first report is representative of the whole group.
            let first_report = &reports[0];
            let fingerprint = first_report.fingerprint;
            let date = first_report.date;
            let country = first_report.country.clone();
            // Network call to the Lox Authority; returns how many of the
            // submitted reports were valid.
            let count_valid = verify_positive_reports(&distributors, reports).await;

            // Get bridge data from database
            // NOTE(review): "bridges" is re-read and re-written once per group
            // so each group's update is committed immediately; hoisting this
            // out of the loop would change intermediate db states — confirm
            // before optimizing.
            let mut bridges = match db.get("bridges").unwrap() {
                Some(v) => bincode::deserialize(&v).unwrap(),
                None => BTreeMap::<[u8; 20], BridgeInfo>::new(),
            };

            // Get bridge info or make new one
            if !bridges.contains_key(&fingerprint) {
                // This case shouldn't happen unless the bridge hasn't published
                // any bridge stats.
                bridges.insert(fingerprint, BridgeInfo::new(fingerprint, &"".to_string()));
            }
            let bridge_info = bridges.get_mut(&fingerprint).unwrap();

            // Add the new report count to it
            if bridge_info.info_by_country.contains_key(&country) {
                let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
                bridge_country_info.add_info(BridgeInfoType::PositiveReports, date, count_valid);
            } else {
                // No existing entry; make a new one.
                let mut bridge_country_info = BridgeCountryInfo::new();
                bridge_country_info.add_info(BridgeInfoType::PositiveReports, date, count_valid);
                bridge_info
                    .info_by_country
                    .insert(country, bridge_country_info);
            }
            // Commit changes to database
            db.insert("bridges", bincode::serialize(&bridges).unwrap())
                .unwrap();
        }
    }
    // TODO: Would it be cheaper to just recreate it?
    all_positive_reports.clear();
    // Remove the now-processed reports from the database
    db.insert(
        "prs-to-process",
        bincode::serialize(&all_positive_reports).unwrap(),
    )
    .unwrap();
}
|
||||
|
||||
// Verdict on bridge reachability
|
||||
|
||||
/// Guess which countries block a bridge. This function returns a map of new
|
||||
/// blockages (fingerprint : set of countries which block the bridge)
|
||||
pub fn guess_blockages(db: &Db, analyzer: &dyn Analyzer) -> HashMap<[u8; 20], HashSet<String>> {
|
||||
// Map of bridge fingerprint to set of countries which newly block it
|
||||
let mut blockages = HashMap::<[u8; 20], HashSet<String>>::new();
|
||||
|
||||
// Get bridge data from database
|
||||
let mut bridges = match db.get("bridges").unwrap() {
|
||||
Some(v) => bincode::deserialize(&v).unwrap(),
|
||||
None => BTreeMap::<[u8; 20], BridgeInfo>::new(),
|
||||
};
|
||||
|
||||
// Guess for each bridge
|
||||
for (fingerprint, bridge_info) in &mut bridges {
|
||||
let mut new_blockages = HashSet::<String>::new();
|
||||
let blocked_in = analyzer.blocked_in(&bridge_info);
|
||||
for country in blocked_in {
|
||||
let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
|
||||
if !bridge_country_info.blocked {
|
||||
new_blockages.insert(country.to_string());
|
||||
// Mark bridge as blocked when db gets updated
|
||||
bridge_country_info.blocked = true;
|
||||
}
|
||||
}
|
||||
blockages.insert(*fingerprint, new_blockages);
|
||||
}
|
||||
|
||||
// Commit changes to database
|
||||
db.insert("bridges", bincode::serialize(&bridges).unwrap())
|
||||
.unwrap();
|
||||
|
||||
// Return map of new blockages
|
||||
blockages
|
||||
}
|
||||
|
||||
/// Report blocked bridges to bridge distributor
|
||||
pub async fn report_blockages(
|
||||
distributors: &BTreeMap<BridgeDistributor, String>,
|
||||
blockages: HashMap<[u8; 20], HashSet<String>>,
|
||||
) {
|
||||
// For now, only report to Lox
|
||||
// TODO: Support more distributors
|
||||
let uri: String = (distributors
|
||||
.get(&BridgeDistributor::Lox)
|
||||
.unwrap()
|
||||
.to_owned()
|
||||
+ "/reportblocked")
|
||||
.parse()
|
||||
.unwrap();
|
||||
|
||||
// Convert map keys from [u8; 20] to 40-character hex strings
|
||||
let mut blockages_str = HashMap::<String, HashSet<String>>::new();
|
||||
for (fingerprint, countries) in blockages {
|
||||
let fpr_string = array_bytes::bytes2hex("", fingerprint);
|
||||
blockages_str.insert(fpr_string, countries);
|
||||
}
|
||||
|
||||
// Report blocked bridges to bridge distributor
|
||||
let client = Client::new();
|
||||
let req = Request::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.body(Body::from(serde_json::to_string(&blockages_str).unwrap()))
|
||||
.unwrap();
|
||||
let resp = client.request(req).await.unwrap();
|
||||
let buf = hyper::body::to_bytes(resp).await.unwrap();
|
||||
let resp_str: String = serde_json::from_slice(&buf).unwrap();
|
||||
assert_eq!("OK", resp_str);
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
|
|||
use sha1::{Digest, Sha1};
|
||||
use sha3::Sha3_256;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Serialize)]
|
||||
pub enum NegativeReportError {
|
||||
DateInFuture,
|
||||
FailedToDeserialize, // couldn't deserialize to SerializableNegativeReport
|
||||
|
@ -102,6 +102,14 @@ impl NegativeReport {
|
|||
}
|
||||
}
|
||||
|
||||
/// Deserializes the report from slice, eliding the underlying process
|
||||
pub fn from_slice(slice: &[u8]) -> Result<Self, NegativeReportError> {
|
||||
match serde_json::from_slice::<SerializableNegativeReport>(&slice) {
|
||||
Ok(v) => v.to_report(),
|
||||
Err(_) => Err(NegativeReportError::FailedToDeserialize),
|
||||
}
|
||||
}
|
||||
|
||||
/// Verify the report
|
||||
pub fn verify(self, bridge_info: &BridgeVerificationInfo) -> bool {
|
||||
match self.bridge_pok {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
// For Lox-related code where points are uppercase and scalars are lowercase
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use crate::{bridge_verification_info::BridgeVerificationInfo, get_date, CONFIG, COUNTRY_CODES};
|
||||
use crate::{bridge_verification_info::BridgeVerificationInfo, get_date, COUNTRY_CODES};
|
||||
|
||||
use curve25519_dalek::ristretto::RistrettoBasepointTable;
|
||||
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier};
|
||||
|
@ -12,7 +12,7 @@ use std::option::Option;
|
|||
|
||||
pub const REQUIRE_BRIDGE_TOKEN: bool = false;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Serialize)]
|
||||
pub enum PositiveReportError {
|
||||
DateInFuture,
|
||||
FailedToDeserialize, // couldn't deserialize to SerializablePositiveReport
|
||||
|
@ -105,6 +105,14 @@ impl PositiveReport {
|
|||
}
|
||||
}
|
||||
|
||||
/// Deserializes the report from slice, eliding the underlying process
|
||||
pub fn from_slice(slice: &[u8]) -> Result<Self, PositiveReportError> {
|
||||
match serde_json::from_slice::<SerializablePositiveReport>(&slice) {
|
||||
Ok(v) => v.to_report(),
|
||||
Err(_) => Err(PositiveReportError::FailedToDeserialize),
|
||||
}
|
||||
}
|
||||
|
||||
/// Verify report
|
||||
pub fn verify(
|
||||
self,
|
||||
|
|
|
@ -0,0 +1,61 @@
|
|||
use crate::{negative_report::NegativeReport, positive_report::PositiveReport, *};
|
||||
use hyper::{body, header::HeaderValue, Body, Method, Request, Response, StatusCode};
|
||||
use serde_json::json;
|
||||
use sled::Db;
|
||||
use std::convert::Infallible;
|
||||
|
||||
// Handle submitted reports
|
||||
pub async fn handle(db: &Db, req: Request<Body>) -> Result<Response<Body>, Infallible> {
|
||||
match req.method() {
|
||||
&Method::OPTIONS => Ok(Response::builder()
|
||||
.header("Access-Control-Allow-Origin", HeaderValue::from_static("*"))
|
||||
.header("Access-Control-Allow-Headers", "accept, content-type")
|
||||
.header("Access-Control-Allow-Methods", "POST")
|
||||
.status(200)
|
||||
.body(Body::from("Allow POST"))
|
||||
.unwrap()),
|
||||
_ => match (req.method(), req.uri().path()) {
|
||||
(&Method::POST, "/negativereport") => Ok::<_, Infallible>({
|
||||
let bytes = body::to_bytes(req.into_body()).await.unwrap();
|
||||
let nr = match NegativeReport::from_slice(&bytes) {
|
||||
Ok(nr) => nr,
|
||||
Err(e) => {
|
||||
let response = json!({"error": e});
|
||||
let val = serde_json::to_string(&response).unwrap();
|
||||
return Ok(prepare_header(val));
|
||||
}
|
||||
};
|
||||
save_negative_report_to_process(&db, nr);
|
||||
prepare_header("OK".to_string())
|
||||
}),
|
||||
(&Method::POST, "/positivereport") => Ok::<_, Infallible>({
|
||||
let bytes = body::to_bytes(req.into_body()).await.unwrap();
|
||||
let pr = match PositiveReport::from_slice(&bytes) {
|
||||
Ok(pr) => pr,
|
||||
Err(e) => {
|
||||
let response = json!({"error": e});
|
||||
let val = serde_json::to_string(&response).unwrap();
|
||||
return Ok(prepare_header(val));
|
||||
}
|
||||
};
|
||||
save_positive_report_to_process(&db, pr);
|
||||
prepare_header("OK".to_string())
|
||||
}),
|
||||
_ => {
|
||||
// Return 404 not found response.
|
||||
Ok(Response::builder()
|
||||
.status(StatusCode::NOT_FOUND)
|
||||
.body(Body::from("Not found"))
|
||||
.unwrap())
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare HTTP Response for successful Server Request
|
||||
fn prepare_header(response: String) -> Response<Body> {
|
||||
let mut resp = Response::new(Body::from(response));
|
||||
resp.headers_mut()
|
||||
.insert("Access-Control-Allow-Origin", HeaderValue::from_static("*"));
|
||||
resp
|
||||
}
|
Loading…
Reference in New Issue