Main method to download and process extra-infos
parent 08cfacbf85
commit e4ee46866a
@@ -0,0 +1,49 @@
use troll_patrol::{
    extra_info::{self, ExtraInfo},
    //negative_report::SerializableNegativeReport,
    //positive_report::SerializablePositiveReport,
    *,
};

use clap::Parser;
use sled::Db;
use std::{collections::HashSet, path::PathBuf};

#[tokio::main]
async fn main() {
    // TODO: Currently, we're processing extra-infos here, but we want to:
    // 1. Run a server to accept incoming reports
    // 2. Periodically (daily):
    //    a) download new extra-infos
    //    b) determine whether we think each bridge is blocked or not
    //    c) report these results to the LA
    // 3. Store all our data

    let db: Db = sled::open(&CONFIG.db.db_path).unwrap();
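    // (CONFIG presumably comes in via the troll_patrol glob import above;
    // sled creates the database at db_path if it doesn't already exist.)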

    // Track which files have been processed. This is slightly redundant
    // because we're only downloading files we don't already have, but it
    // might be a good idea to check in case we downloaded a file but didn't
    // process it for some reason.
    let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
        Some(v) => bincode::deserialize(&v).unwrap(),
        None => HashSet::<String>::new(),
    };
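    // (db.get yields raw bytes, so the set of filenames round-trips through
    // bincode: deserialized here, serialized again in the final db.insert.)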

    let new_files = extra_info::download_extra_infos().await.unwrap();
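    // (Per the redundancy note above, download_extra_infos only fetches
    // files we don't already have, and returns their names.)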

    let mut new_extra_infos = HashSet::<ExtraInfo>::new();

    // Make set of new extra-infos
    for extra_info_file in &new_files {
        extra_info::add_extra_infos(&extra_info_file, &mut new_extra_infos);
        processed_extra_infos_files.insert(extra_info_file.to_string());
    }

    // Add new extra-infos data to database
    for extra_info in new_extra_infos {
        add_extra_info_to_db(&db, extra_info);
    }

    db.insert(
        b"extra_infos_files",
        bincode::serialize(&processed_extra_infos_files).unwrap(),
    )
    .unwrap();
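    // (sled flushes asynchronously; an explicit db.flush() before exiting
    // would guarantee this write reaches disk.)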
}
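The TODO at the top of main() lays out where this is headed: a long-running server accepting incoming reports, plus a daily pass that downloads extra-infos, judges which bridges look blocked, and reports the results to the LA. A minimal sketch of that shape, assuming tokio's timer utilities; run_server and daily_tasks are hypothetical placeholders, not functions in this commit:

// Hypothetical sketch of the structure described in the TODO above.
// run_server and daily_tasks do not exist in this commit.
use std::time::Duration;

async fn serve_and_update_daily() {
    // 1. Accept incoming reports in a background task
    tokio::spawn(async {
        // run_server().await;
    });

    // 2. Once per day: download new extra-infos, decide which bridges
    //    appear blocked, and report those results to the LA
    let mut timer = tokio::time::interval(Duration::from_secs(60 * 60 * 24));
    loop {
        timer.tick().await;
        // daily_tasks().await;
    }
}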