@@ -1,20 +1,22 @@
+use rand::prelude::StdRng;
 use rand::seq::SliceRandom;
-use rand::thread_rng;
+use rand::{thread_rng, SeedableRng};
 use serde::Deserialize;
 use std::collections::HashMap;
+use std::ops::DerefMut;
 use std::sync::{Arc, Mutex, RwLock};
 use std::time::{Duration, Instant};
 use std::{cmp, fs};
 use tokio::time;
 use warp::{http, Filter, Reply};
 
-const BLACKLIST_DELAY_TIME_MINS: u64 = 15;
 const ONE_YEAR: Duration = Duration::from_secs(60 * 60 * 24 * 365);
 
 #[derive(Deserialize, Debug)]
 struct WebsiteConfig {
     // May add more here in the future
     delay: u32, // seconds
+    blacklist_minutes: u32,
 }
 
 #[derive(Deserialize, Debug)]
@@ -28,12 +30,14 @@ struct Website {
     proxies: HashMap<String, Instant>,
     index: usize,
     delay: u32,
+    blacklist_minutes: u32,
 }
 
 #[derive(Clone)]
 struct ServerState {
     websites: HashMap<String, Arc<Mutex<Website>>>,
     proxies: Vec<String>,
+    rng: Arc<Mutex<StdRng>>,
 }
 
 #[derive(Deserialize)]
@@ -76,6 +80,7 @@ async fn main() {
                 proxies: hsh,
                 index: 0,
                 delay: config.websites[site].delay,
+                blacklist_minutes: config.websites[site].blacklist_minutes,
             })),
         );
     }
@@ -83,6 +88,7 @@ async fn main() {
     let state = Arc::new(RwLock::new(ServerState {
         websites,
         proxies: config.proxies,
+        rng: Arc::new(Mutex::new(StdRng::from_rng(thread_rng()).unwrap())),
     }));
 
     let get_proxy = {
@@ -120,7 +126,11 @@ async fn get_proxy_fn(
         .iter()
         .map(|(k, v)| (k.clone(), *v))
         .collect();
-    proxies.shuffle(&mut thread_rng());
+    //for _ in 0..100 {
+    proxies.shuffle(state.rng.lock().unwrap().deref_mut());
+    //}
+
+    println!("{:?}", proxies);
 
     let mut min_proxy_address: String = "".to_string();
     let mut min_time = Instant::now() + ONE_YEAR;
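For reference, outside the patch itself: the two hunks above move shuffling from a fresh thread_rng() per request to a single seeded StdRng stored in ServerState behind Arc<Mutex<..>>. A minimal standalone sketch of that pattern, using the same rand calls the patch adds and hypothetical proxy addresses, could look like:

use rand::prelude::StdRng;
use rand::seq::SliceRandom;
use rand::{thread_rng, SeedableRng};
use std::ops::DerefMut;
use std::sync::{Arc, Mutex};

fn main() {
    // One RNG created up front and shared; callers lock it instead of
    // constructing a fresh thread_rng() on every request.
    let rng = Arc::new(Mutex::new(StdRng::from_rng(thread_rng()).unwrap()));

    // Hypothetical proxy list, standing in for website.proxies.
    let mut proxies = vec!["proxy-a:8080", "proxy-b:8080", "proxy-c:8080"];
    proxies.shuffle(rng.lock().unwrap().deref_mut());
    println!("{:?}", proxies);
}
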
@@ -158,7 +168,7 @@ async fn get_proxy_fn(
 }
 
 // Marks a proxy as blacklisted(not working, blocked by website, etc.)
-// Will auto-unblacklist after 15 minutes
+// Will auto-un-blacklist after the website's configured blacklist_minutes
 async fn blacklist_proxy_fn(
     website_str: String,
     state: Arc<RwLock<ServerState>>,
@@ -166,21 +176,26 @@ async fn blacklist_proxy_fn(
 ) -> Result<impl warp::Reply, warp::Rejection> {
     let state = state.read().unwrap();
     match state.websites.get(&website_str) {
-        Some(x) => match x.lock().unwrap().proxies.get_mut(&body.proxy) {
-            Some(y) => {
-                *y = Instant::now() + Duration::from_secs(60 * BLACKLIST_DELAY_TIME_MINS);
-
-                Ok(
-                    warp::reply::with_status("Success", http::status::StatusCode::OK)
-                        .into_response(),
-                )
-            }
-            None => Ok(warp::reply::with_status(
-                "Proxy not found",
-                http::status::StatusCode::NOT_FOUND,
-            )
-            .into_response()),
-        },
+        Some(x) => {
+            let mut website = x.lock().unwrap();
+            let blacklist_minutes = website.blacklist_minutes as u64;
+            match website.proxies.get_mut(&body.proxy) {
+                Some(y) => {
+                    *y = Instant::now() + Duration::from_secs(60 * blacklist_minutes);
+
+                    Ok(
+                        warp::reply::with_status("Success", http::status::StatusCode::OK)
+                            .into_response(),
+                    )
+                }
+                None => Ok(warp::reply::with_status(
+                    "Proxy not found",
+                    http::status::StatusCode::NOT_FOUND,
+                )
+                .into_response()),
+            }
+        }
+
         None => Ok(warp::reply::with_status(
             "Website not found",
             http::status::StatusCode::NOT_FOUND,