project restructure 😔

Rewrote some stuff, cleaned up some code, and I don't even remember what else 😭
Yuhki 2025-03-25 17:40:32 +01:00
parent 219891ea58
commit bc558a8714
14 changed files with 702 additions and 580 deletions


@@ -26,16 +26,7 @@ cd wuthering-waves-downloader
cargo build --release
```
### 🆕 What's New
- **Multi-CDN Support** - Automatically tries all available CDNs for each download (see the sketch below)
- **Version Selection** - Choose between Live/Beta and CN/OS versions
- **GZIP Compression** - Full support for compressed responses
- **Enhanced Progress Tracking** - Cleaner UI with progress in window title
- **Improved Reliability** - Better error handling and recovery
> Feel free to open a pull request if you want to improve anything (I'm tired and can't do this shit anymore 😭)
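
A minimal sketch of the multi-CDN fallback idea mentioned above, assuming a blocking `reqwest` client; the function name, mirror URLs, and file path are made up for illustration, and the real logic lives in `download_file` in `src/network/client.rs`:

```rust
use std::time::Duration;
use reqwest::blocking::Client;

// Illustrative helper (not the downloader's actual API): try each CDN base
// URL in order and return the first successful response body.
fn fetch_with_fallback(client: &Client, cdn_bases: &[&str], path: &str) -> Result<Vec<u8>, String> {
    for base in cdn_bases {
        let url = format!("{}/{}", base.trim_end_matches('/'), path.trim_start_matches('/'));
        match client.get(&url).timeout(Duration::from_secs(30)).send() {
            // Only a 2xx answer counts; anything else falls through to the next CDN.
            Ok(resp) if resp.status().is_success() => {
                return resp.bytes().map(|b| b.to_vec()).map_err(|e| e.to_string());
            }
            Ok(resp) => eprintln!("{base} answered HTTP {}, trying next CDN", resp.status()),
            Err(e) => eprintln!("{base} unreachable ({e}), trying next CDN"),
        }
    }
    Err(format!("all CDNs failed for {path}"))
}

fn main() {
    let client = Client::new();
    // Hypothetical mirror list and file path, purely for demonstration.
    let cdns = ["https://cdn1.example.com", "https://cdn2.example.com"];
    match fetch_with_fallback(&client, &cdns, "resource/example.zip") {
        Ok(bytes) => println!("downloaded {} bytes", bytes.len()),
        Err(e) => eprintln!("{e}"),
    }
}
```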
## 🌟 Key Features

5
src/config/cfg.rs Normal file

@@ -0,0 +1,5 @@
#[derive(Clone)]
pub struct Config {
pub index_url: String,
pub zip_bases: Vec<String>,
}

2
src/config/mod.rs Normal file

@@ -0,0 +1,2 @@
pub mod cfg;
pub mod status;

14
src/config/status.rs Normal file

@@ -0,0 +1,14 @@
use colored::*;
#[derive(Clone, Copy)]
pub struct Status;
impl Status {
pub fn info() -> ColoredString { "[*]".cyan() }
pub fn success() -> ColoredString { "[+]".green() }
pub fn warning() -> ColoredString { "[!]".yellow() }
pub fn error() -> ColoredString { "[-]".red() }
pub fn question() -> ColoredString { "[?]".blue() }
pub fn progress() -> ColoredString { "[→]".magenta() }
pub fn matched() -> ColoredString { "[↓]".blue() }
}

1
src/download/mod.rs Normal file

@@ -0,0 +1 @@
pub mod progress;

8
src/download/progress.rs Normal file

@@ -0,0 +1,8 @@
use std::{sync::Arc, time::Instant};
#[derive(Clone)]
pub struct DownloadProgress {
pub total_bytes: Arc<std::sync::atomic::AtomicU64>,
pub downloaded_bytes: Arc<std::sync::atomic::AtomicU64>,
pub start_time: Instant,
}

83
src/io/console.rs Normal file

@@ -0,0 +1,83 @@
use std::{io, path::Path, time::{Duration, Instant}};
use colored::Colorize;
use winconsole::console::set_title;
use crate::{config::status::Status, download::progress::DownloadProgress};
use super::logging::{bytes_to_human, format_duration};
pub fn print_results(success: usize, total: usize, folder: &Path) {
let title = if success == total {
" DOWNLOAD COMPLETE ".on_blue().white().bold()
} else {
" PARTIAL DOWNLOAD ".on_blue().white().bold()
};
println!("\n{}\n", title);
println!(
"{} Successfully downloaded: {}",
Status::success(),
success.to_string().green()
);
println!(
"{} Failed downloads: {}",
Status::error(),
(total - success).to_string().red()
);
println!(
"{} Files saved to: {}",
Status::info(),
folder.display().to_string().cyan()
);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
}
pub fn update_title(
start_time: Instant,
success: usize,
total: usize,
progress: &DownloadProgress,
) {
let elapsed = start_time.elapsed();
let elapsed_secs = elapsed.as_secs();
let downloaded_bytes = progress.downloaded_bytes.load(std::sync::atomic::Ordering::SeqCst);
let total_bytes = progress.total_bytes.load(std::sync::atomic::Ordering::SeqCst);
let speed = if elapsed_secs > 0 {
downloaded_bytes / elapsed_secs
} else {
0
};
let (speed_value, speed_unit) = if speed > 1_000_000 {
(speed / 1_000_000, "MB/s")
} else {
(speed / 1_000, "KB/s")
};
let remaining_bytes = total_bytes.saturating_sub(downloaded_bytes);
let eta_secs = if speed > 0 {
remaining_bytes / speed
} else {
0
};
let eta_str = format_duration(Duration::from_secs(eta_secs));
let progress_percent = if total_bytes > 0 {
format!(" ({}%)", (downloaded_bytes * 100 / total_bytes))
} else {
String::new()
};
let title = format!(
"Wuthering Waves Downloader - {}/{} files - {}{} - Speed: {}{} - ETA: {}",
success,
total,
bytes_to_human(downloaded_bytes),
progress_percent,
speed_value,
speed_unit,
eta_str
);
set_title(&title).unwrap();
}

77
src/io/file.rs Normal file

@@ -0,0 +1,77 @@
use std::{fs, io, io::Write, path::{Path, PathBuf}};
use md5::{Digest, Md5};
use crate::config::status::Status;
pub fn calculate_md5(path: &Path) -> String {
let mut file = fs::File::open(path).unwrap();
let mut hasher = Md5::new();
io::copy(&mut file, &mut hasher).unwrap();
format!("{:x}", hasher.finalize())
}
pub fn check_existing_file(path: &Path, expected_md5: Option<&str>, expected_size: Option<u64>) -> bool {
if !path.exists() {
return false;
}
if let Some(size) = expected_size {
if fs::metadata(path).map(|m| m.len()).unwrap_or(0) != size {
return false;
}
}
if let Some(md5) = expected_md5 {
if calculate_md5(path) != md5 {
return false;
}
}
true
}
pub fn get_filename(path: &str) -> String {
Path::new(path)
.file_name()
.and_then(|n| n.to_str())
.unwrap_or(path)
.to_string()
}
pub fn get_dir() -> PathBuf {
loop {
print!(
"{} Enter download directory (Enter for current): ",
Status::question()
);
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
let path = input.trim();
let path = if path.is_empty() {
std::env::current_dir().unwrap()
} else {
PathBuf::from(shellexpand::tilde(path).into_owned())
};
if path.is_dir() {
return path;
}
print!(
"{} Directory doesn't exist. Create? (y/n): ",
Status::warning()
);
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
if input.trim().to_lowercase() == "y" {
fs::create_dir_all(&path).unwrap();
return path;
}
}
}

31
src/io/logging.rs Normal file

@@ -0,0 +1,31 @@
use std::{fs::{self, OpenOptions}, io::Write, time::{Duration, SystemTime}};
pub fn setup_logging() -> fs::File {
OpenOptions::new()
.create(true)
.append(true)
.open("logs.log")
.expect("Failed to create/open log file")
}
pub fn log_error(mut log_file: &fs::File, message: &str) {
let timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
writeln!(log_file, "[{}] ERROR: {}", timestamp, message).unwrap();
}
pub fn format_duration(duration: Duration) -> String {
let secs = duration.as_secs();
format!("{:02}:{:02}:{:02}", secs / 3600, (secs % 3600) / 60, secs % 60)
}
pub fn bytes_to_human(bytes: u64) -> String {
match bytes {
b if b > 1_000_000_000 => format!("{:.2} GB", b as f64 / 1_000_000_000.0),
b if b > 1_000_000 => format!("{:.2} MB", b as f64 / 1_000_000.0),
b if b > 1_000 => format!("{:.2} KB", b as f64 / 1_000.0),
b => format!("{} B", b),
}
}

3
src/io/mod.rs Normal file

@@ -0,0 +1,3 @@
pub mod console;
pub mod file;
pub mod logging;

4
src/lib.rs Normal file

@@ -0,0 +1,4 @@
pub mod config;
pub mod download;
pub mod io;
pub mod network;


@@ -1,540 +1,15 @@
use std::{
fs::{self, OpenOptions},
io::{self, Read, Write},
path::{Path, PathBuf},
sync::Arc,
thread,
time::{Duration, Instant, SystemTime},
};
use colored::*;
use flate2::read::GzDecoder;
use md5::{Digest, Md5};
use reqwest::{StatusCode, blocking::Client};
use serde_json::{Value, from_reader, from_str};
use winconsole::console::{clear, set_title};
const INDEX_URL: &str = "https://gist.githubusercontent.com/yuhkix/b8796681ac2cd3bab11b7e8cdc022254/raw/30a8e747debe9e333d5f4ec5d8700dab500594a2/wuwa.json";
const MAX_RETRIES: usize = 3;
const DOWNLOAD_TIMEOUT: u64 = 300; // 5 minutes, in seconds
struct Status;
impl Status {
fn info() -> ColoredString {
"[*]".cyan()
}
fn success() -> ColoredString {
"[+]".green()
}
fn warning() -> ColoredString {
"[!]".yellow()
}
fn error() -> ColoredString {
"[-]".red()
}
fn question() -> ColoredString {
"[?]".blue()
}
fn progress() -> ColoredString {
"[→]".magenta()
}
}
#[derive(Clone)]
struct DownloadConfig {
index_url: String,
zip_bases: Vec<String>,
}
fn setup_logging() -> fs::File {
let log_file = OpenOptions::new()
.create(true)
.append(true)
.open("logs.log")
.expect("Failed to create/open log file");
log_file
}
fn log_error(mut log_file: &fs::File, message: &str) {
let timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
writeln!(log_file, "[{}] ERROR: {}", timestamp, message).expect("Failed to write to log file");
}
fn fetch_gist(client: &Client) -> Result<String, String> {
let mut response = client
.get(INDEX_URL)
.timeout(Duration::from_secs(30))
.send()
.map_err(|e| format!("Network error: {}", e))?;
if !response.status().is_success() {
return Err(format!("Server error: HTTP {}", response.status()));
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let gist_data: Value = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
response
.copy_to(&mut buffer)
.map_err(|e| format!("Error reading response bytes: {}", e))?;
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed = String::new();
gz.read_to_string(&mut decompressed)
.map_err(|e| format!("Error decompressing content: {}", e))?;
from_str(&decompressed).map_err(|e| format!("Invalid JSON: {}", e))?
} else {
from_reader(response).map_err(|e| format!("Invalid JSON: {}", e))?
};
println!("{} Available versions:", Status::info());
println!("1. Preload - OS");
println!("2. Live - CN (Needs Update)");
println!("3. Beta - OS (Needs Update)");
println!("4. Beta - CN (Needs Update)");
loop {
print!("{} Select version to download: ", Status::question());
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
let choice = input.trim();
match choice {
"1" => {
return gist_data["live"]["os-live"]
.as_str()
.map(|s| s.to_string())
.ok_or("Missing os-live URL".to_string());
}
"2" => {
return gist_data["live"]["cn-live"]
.as_str()
.map(|s| s.to_string())
.ok_or("Missing cn-live URL".to_string());
}
"3" => {
return gist_data["beta"]["os-beta"]
.as_str()
.map(|s| s.to_string())
.ok_or("Missing os-beta URL".to_string());
}
"4" => {
return gist_data["beta"]["cn-beta"]
.as_str()
.map(|s| s.to_string())
.ok_or("Missing cn-beta URL".to_string());
}
_ => println!("{} Invalid selection, please try again", Status::error()),
}
}
}
fn get_predownload(client: &Client) -> Result<DownloadConfig, String> {
let selected_index_url = fetch_gist(client)?;
println!("{} Fetching download configuration...", Status::info());
let mut response = client
.get(&selected_index_url)
.timeout(Duration::from_secs(30))
.send()
.map_err(|e| format!("Network error: {}", e))?;
if !response.status().is_success() {
return Err(format!("Server error: HTTP {}", response.status()));
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let config: Value = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
response
.copy_to(&mut buffer)
.map_err(|e| format!("Error reading response bytes: {}", e))?;
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed = String::new();
gz.read_to_string(&mut decompressed)
.map_err(|e| format!("Error decompressing content: {}", e))?;
from_str(&decompressed).map_err(|e| format!("Invalid JSON: {}", e))?
} else {
from_reader(response).map_err(|e| format!("Invalid JSON: {}", e))?
};
let predownload_config = config
.get("predownload")
.and_then(|p| p.get("config"))
.ok_or("Missing predownload.config in response")?;
let base_url = predownload_config
.get("baseUrl")
.and_then(Value::as_str)
.ok_or("Missing or invalid baseUrl")?;
let index_file = predownload_config
.get("indexFile")
.and_then(Value::as_str)
.ok_or("Missing or invalid indexFile")?;
let default_config = config
.get("default")
.ok_or("Missing default config in response")?;
let cdn_list = default_config
.get("cdnList")
.and_then(Value::as_array)
.ok_or("Missing or invalid cdnList")?;
let mut cdn_urls = Vec::new();
for cdn in cdn_list {
if let Some(url) = cdn.get("url").and_then(Value::as_str) {
cdn_urls.push(url.trim_end_matches('/').to_string());
}
}
if cdn_urls.is_empty() {
return Err("No valid CDN URLs found".to_string());
}
let full_index_url = format!("{}/{}", cdn_urls[0], index_file.trim_start_matches('/'));
let zip_bases = cdn_urls
.iter()
.map(|cdn| format!("{}/{}", cdn, base_url.trim_start_matches('/')))
.collect();
Ok(DownloadConfig {
index_url: full_index_url,
zip_bases,
})
}
fn fetch_index(client: &Client, config: &DownloadConfig, log_file: &fs::File) -> Value {
println!("{} Fetching index file...", Status::info());
let mut response = match client
.get(&config.index_url)
.timeout(Duration::from_secs(30))
.send()
{
Ok(resp) => resp,
Err(e) => {
log_error(log_file, &format!("Error fetching index file: {}", e));
clear().unwrap();
println!("{} Error fetching index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
};
if !response.status().is_success() {
let msg = format!("Error fetching index file: HTTP {}", response.status());
log_error(log_file, &msg);
clear().unwrap();
println!("{} {}", Status::error(), msg);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let text = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
if let Err(e) = response.copy_to(&mut buffer) {
log_error(log_file, &format!("Error reading index file bytes: {}", e));
clear().unwrap();
println!("{} Error reading index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed_text = String::new();
if let Err(e) = gz.read_to_string(&mut decompressed_text) {
log_error(log_file, &format!("Error decompressing index file: {}", e));
clear().unwrap();
println!("{} Error decompressing index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
decompressed_text
} else {
match response.text() {
Ok(t) => t,
Err(e) => {
log_error(
log_file,
&format!("Error reading index file response: {}", e),
);
clear().unwrap();
println!("{} Error reading index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
}
};
println!("{} Index file downloaded successfully", Status::success());
match from_str(&text) {
Ok(v) => v,
Err(e) => {
log_error(log_file, &format!("Error parsing index file JSON: {}", e));
clear().unwrap();
println!("{} Error parsing index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
}
}
fn get_dir() -> PathBuf {
loop {
print!(
"{} Enter download directory (Enter for current): ",
Status::question()
);
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
let path = input.trim();
let path = if path.is_empty() {
std::env::current_dir().unwrap()
} else {
PathBuf::from(shellexpand::tilde(path).into_owned())
};
if path.is_dir() {
return path;
}
print!(
"{} Directory doesn't exist. Create? (y/n): ",
Status::warning()
);
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
if input.trim().to_lowercase() == "y" {
fs::create_dir_all(&path).unwrap();
return path;
}
}
}
fn calculate_md5(path: &Path) -> String {
let mut file = fs::File::open(path).unwrap();
let mut hasher = Md5::new();
io::copy(&mut file, &mut hasher).unwrap();
format!("{:x}", hasher.finalize())
}
fn download_file(
client: &Client,
config: &DownloadConfig,
dest: &str,
folder: &Path,
expected_md5: Option<&str>,
log_file: &fs::File,
) -> bool {
let dest = dest.replace('\\', "/");
let path = folder.join(&dest);
if let Some(parent) = path.parent() {
if let Err(e) = fs::create_dir_all(parent) {
log_error(
log_file,
&format!("Error creating directory for {}: {}", dest, e),
);
println!("{} Error creating directory: {}", Status::error(), e);
return false;
}
}
for (i, base_url) in config.zip_bases.iter().enumerate() {
let url = format!("{}{}", base_url, dest);
let head_response = match client.head(&url).timeout(Duration::from_secs(10)).send() {
Ok(resp) => resp,
Err(e) => {
log_error(
log_file,
&format!("CDN {} failed for {} - {}", i + 1, dest, e),
);
continue;
}
};
if head_response.status() != StatusCode::OK {
log_error(
log_file,
&format!(
"CDN {} failed for {} (HTTP {})",
i + 1,
dest,
head_response.status()
),
);
continue;
}
println!("{} Downloading: {}", Status::progress(), dest);
let mut retries = MAX_RETRIES;
let mut last_error = None;
while retries > 0 {
let result = (|| -> Result<(), Box<dyn std::error::Error>> {
let response = client
.get(&url)
.timeout(Duration::from_secs(DOWNLOAD_TIMEOUT))
.send()?;
if !response.status().is_success() {
return Err(response.error_for_status().unwrap_err().into());
}
let mut file = fs::File::create(&path)?;
let mut content = response;
io::copy(&mut content, &mut file)?;
Ok(())
})();
match result {
Ok(_) => break,
Err(e) => {
last_error = Some(e.to_string());
log_error(
log_file,
&format!(
"Download attempt failed for {} ({} retries left): {}",
dest,
retries - 1,
e
),
);
retries -= 1;
let _ = fs::remove_file(&path);
if retries > 0 {
println!(
"{} Retrying download for {}... ({} attempts left)",
Status::warning(),
dest,
retries
);
}
}
}
}
if retries == 0 {
log_error(
log_file,
&format!(
"Download failed after retries for {}: {}",
dest,
last_error.unwrap_or_default()
),
);
println!("{} Download failed: {}", Status::error(), dest.red());
return false;
}
if let Some(expected) = expected_md5 {
let actual = calculate_md5(&path);
if actual != expected {
log_error(
log_file,
&format!(
"Checksum failed for {}: expected {}, got {}",
dest, expected, actual
),
);
fs::remove_file(&path).unwrap();
println!("{} Checksum failed: {}", Status::error(), dest.red());
return false;
}
}
println!(
"{} {}: {}",
Status::success(),
if expected_md5.is_some() {
"Verified"
} else {
"Downloaded"
},
dest
);
return true;
}
log_error(log_file, &format!("All CDNs failed for {}", dest));
println!("{} All CDNs failed for {}", Status::error(), dest.red());
false
}
fn format_duration(duration: Duration) -> String {
let secs = duration.as_secs();
let hours = secs / 3600;
let minutes = (secs % 3600) / 60;
let seconds = secs % 60;
format!("{:02}:{:02}:{:02}", hours, minutes, seconds)
}
fn update_title(start_time: Instant, success: usize, total: usize) {
let elapsed = start_time.elapsed();
let elapsed_str = format_duration(elapsed);
let progress = if total > 0 {
format!(" ({}%)", (success as f32 / total as f32 * 100.0).round())
} else {
String::new()
};
let title = format!(
"Wuthering Waves Downloader - Elapsed: {} - {}/{} files{}",
elapsed_str, success, total, progress
);
set_title(&title).unwrap();
}
use reqwest::blocking::Client;
use serde_json::Value;
use std::{sync::Arc, time::{Duration, Instant}, io, thread};
use winconsole::console::clear;
use wuwa_downloader::{
config::status::Status,
download::progress::DownloadProgress,
io::{console::{print_results, update_title}, file::get_dir, logging::{log_error, setup_logging}},
network::client::{download_file, fetch_index, get_predownload},
};
fn main() {
let log_file = setup_logging();
@@ -579,28 +54,40 @@ fn main() {
Status::info(),
resources.len().to_string().cyan()
);
let should_stop = Arc::new(std::sync::atomic::AtomicBool::new(false));
let success = Arc::new(std::sync::atomic::AtomicUsize::new(0));
let total_files = resources.len();
let folder_clone = folder.clone();
let start_time = Instant::now();
let progress = DownloadProgress {
total_bytes: Arc::new(std::sync::atomic::AtomicU64::new(0)),
downloaded_bytes: Arc::new(std::sync::atomic::AtomicU64::new(0)),
start_time: Instant::now(),
};
let success_clone = success.clone();
let should_stop_clone = should_stop.clone();
let progress_clone = progress.clone();
let title_thread = thread::spawn(move || {
loop {
while !should_stop_clone.load(std::sync::atomic::Ordering::SeqCst) {
update_title(
start_time,
progress_clone.start_time,
success_clone.load(std::sync::atomic::Ordering::SeqCst),
total_files,
&progress_clone,
);
thread::sleep(Duration::from_secs(1));
}
});
let success_clone = success.clone();
let should_stop_clone = should_stop.clone();
let log_file_clone = log_file.try_clone().unwrap();
let folder_clone2 = folder.clone();
ctrlc::set_handler(move || {
should_stop_clone.store(true, std::sync::atomic::Ordering::SeqCst);
clear().unwrap();
println!("{} Download interrupted by user", Status::warning());
let success_count = success_clone.load(std::sync::atomic::Ordering::SeqCst);
@@ -625,7 +112,7 @@ fn main() {
println!(
"{} Files saved to: {}",
Status::info(),
folder_clone.display().to_string().cyan()
folder_clone2.display().to_string().cyan()
);
println!("\n{} Press Enter to exit...", Status::warning());
@@ -644,46 +131,34 @@ fn main() {
.unwrap();
for item in resources.iter() {
if should_stop.load(std::sync::atomic::Ordering::SeqCst) {
break;
}
if let Some(dest) = item.get("dest").and_then(Value::as_str) {
let md5 = item.get("md5").and_then(Value::as_str);
if download_file(&client, &config, dest, &folder, md5, &log_file) {
if download_file(
&client,
&config,
dest,
&folder,
md5,
&log_file,
&should_stop,
&progress,
) {
success.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
}
}
}
drop(title_thread);
should_stop.store(true, std::sync::atomic::Ordering::SeqCst);
title_thread.join().unwrap();
clear().unwrap();
print_results(
success.load(std::sync::atomic::Ordering::SeqCst),
total_files,
&folder,
);
}
fn print_results(success: usize, total: usize, folder: &Path) {
let title = if success == total {
" DOWNLOAD COMPLETE ".on_blue().white().bold()
} else {
" PARTIAL DOWNLOAD ".on_blue().white().bold()
};
println!("\n{}\n", title);
println!(
"{} Successfully downloaded: {}",
Status::success(),
success.to_string().green()
);
println!(
"{} Failed downloads: {}",
Status::error(),
(total - success).to_string().red()
);
println!(
"{} Files saved to: {}",
Status::info(),
folder.display().to_string().cyan()
);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
}

427
src/network/client.rs Normal file

@@ -0,0 +1,427 @@
use colored::Colorize;
use flate2::read::GzDecoder;
use reqwest::blocking::Client;
use serde_json::{from_reader, from_str, Value};
use winconsole::console;
use std::{io::{Read, Write}, fs, io, path::Path, time::Duration};
use crate::config::cfg::Config;
use crate::download::progress::DownloadProgress;
use crate::io::file::{calculate_md5, check_existing_file, get_filename};
use crate::io::logging::log_error;
use crate::config::status::Status;
const INDEX_URL: &str = "https://gist.githubusercontent.com/yuhkix/b8796681ac2cd3bab11b7e8cdc022254/raw/30a8e747debe9e333d5f4ec5d8700dab500594a2/wuwa.json";
const MAX_RETRIES: usize = 3;
const DOWNLOAD_TIMEOUT: u64 = 300;
const BUFFER_SIZE: usize = 8192;
pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {
println!("{} Fetching index file...", Status::info());
let mut response = match client
.get(&config.index_url)
.timeout(Duration::from_secs(30))
.send()
{
Ok(resp) => resp,
Err(e) => {
log_error(log_file, &format!("Error fetching index file: {}", e));
console::clear().unwrap();
println!("{} Error fetching index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
};
if !response.status().is_success() {
let msg = format!("Error fetching index file: HTTP {}", response.status());
log_error(log_file, &msg);
console::clear().unwrap();
println!("{} {}", Status::error(), msg);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let text = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
if let Err(e) = response.copy_to(&mut buffer) {
log_error(log_file, &format!("Error reading index file bytes: {}", e));
console::clear().unwrap();
println!("{} Error reading index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed_text = String::new();
if let Err(e) = gz.read_to_string(&mut decompressed_text) {
log_error(log_file, &format!("Error decompressing index file: {}", e));
console::clear().unwrap();
println!("{} Error decompressing index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
decompressed_text
} else {
match response.text() {
Ok(t) => t,
Err(e) => {
log_error(
log_file,
&format!("Error reading index file response: {}", e),
);
console::clear().unwrap();
println!("{} Error reading index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
}
};
println!("{} Index file downloaded successfully", Status::success());
match from_str(&text) {
Ok(v) => v,
Err(e) => {
log_error(log_file, &format!("Error parsing index file JSON: {}", e));
console::clear().unwrap();
println!("{} Error parsing index file: {}", Status::error(), e);
println!("\n{} Press Enter to exit...", Status::warning());
let _ = io::stdin().read_line(&mut String::new());
std::process::exit(1);
}
}
}
pub fn download_file(
client: &Client,
config: &Config,
dest: &str,
folder: &Path,
expected_md5: Option<&str>,
log_file: &fs::File,
should_stop: &std::sync::atomic::AtomicBool,
progress: &DownloadProgress,
) -> bool {
let dest = dest.replace('\\', "/");
let path = folder.join(&dest);
let filename = get_filename(&dest);
let mut file_size = None;
for base_url in &config.zip_bases {
let url = format!("{}{}", base_url, dest);
if let Ok(head_response) = client.head(&url).timeout(Duration::from_secs(10)).send() {
if let Some(size) = head_response.headers()
.get("content-length")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.parse::<u64>().ok())
{
file_size = Some(size);
progress.total_bytes.fetch_add(size, std::sync::atomic::Ordering::SeqCst);
break;
}
}
}
if let (Some(md5), Some(size)) = (expected_md5, file_size) {
if should_skip_download(&path, Some(md5), Some(size)) {
println!("{} File is valid: {}", Status::matched(), filename.blue());
return true;
}
}
if let Some(parent) = path.parent() {
if let Err(e) = fs::create_dir_all(parent) {
log_error(log_file, &format!("Directory error for {}: {}", dest, e));
println!("{} Directory error: {}", Status::error(), e);
return false;
}
}
for (i, base_url) in config.zip_bases.iter().enumerate() {
let url = format!("{}{}", base_url, dest);
let head_response = match client.head(&url).timeout(Duration::from_secs(10)).send() {
Ok(resp) if resp.status().is_success() => resp,
Ok(resp) => {
log_error(log_file, &format!("CDN {} failed for {} (HTTP {})", i+1, dest, resp.status()));
continue;
},
Err(e) => {
log_error(log_file, &format!("CDN {} failed for {}: {}", i+1, dest, e));
continue;
}
};
let expected_size = file_size.or_else(|| head_response.headers()
.get("content-length")
.and_then(|v| v.to_str().ok())
.and_then(|s| s.parse::<u64>().ok()));
if let (Some(md5), Some(size)) = (expected_md5, expected_size) {
if check_existing_file(&path, Some(md5), Some(size)) {
println!("{} File is valid: {}", Status::matched(), filename.blue());
return true;
}
}
println!("{} Downloading: {}", Status::progress(), filename.magenta());
let mut retries = MAX_RETRIES;
let mut last_error = None;
while retries > 0 {
let result = download_single_file(client, &url, &path, should_stop, progress);
match result {
Ok(_) => break,
Err(e) => {
last_error = Some(e);
retries -= 1;
let _ = fs::remove_file(&path);
if retries > 0 {
println!("{} Retrying {}... ({} left)",
Status::warning(), filename.yellow(), retries);
}
}
}
}
if retries == 0 {
log_error(log_file, &format!("Failed after retries for {}: {}", dest,
last_error.unwrap_or_default()));
println!("{} Failed: {}", Status::error(), filename.red());
return false;
}
if let Some(expected) = expected_md5 {
if should_stop.load(std::sync::atomic::Ordering::SeqCst) {
return false;
}
let actual = calculate_md5(&path);
if actual != expected {
log_error(log_file, &format!("Checksum failed for {}: expected {}, got {}",
dest, expected, actual));
fs::remove_file(&path).unwrap();
println!("{} Checksum failed: {}", Status::error(), filename.red());
return false;
}
}
println!("{} Downloaded: {}", Status::success(), filename.green());
return true;
}
log_error(log_file, &format!("All CDNs failed for {}", dest));
println!("{} All CDNs failed for {}", Status::error(), filename.red());
false
}
fn download_single_file(
client: &Client,
url: &str,
path: &Path,
should_stop: &std::sync::atomic::AtomicBool,
progress: &DownloadProgress,
) -> Result<(), String> {
let mut response = client
.get(url)
.timeout(Duration::from_secs(DOWNLOAD_TIMEOUT))
.send()
.map_err(|e| format!("Network error: {}", e))?;
if !response.status().is_success() {
return Err(format!("HTTP error: {}", response.status()));
}
let mut file = fs::File::create(path)
.map_err(|e| format!("File error: {}", e))?;
let mut buffer = [0; BUFFER_SIZE];
loop {
if should_stop.load(std::sync::atomic::Ordering::SeqCst) {
return Err("Download interrupted".into());
}
let bytes_read = response.read(&mut buffer)
.map_err(|e| format!("Read error: {}", e))?;
if bytes_read == 0 {
break;
}
file.write_all(&buffer[..bytes_read])
.map_err(|e| format!("Write error: {}", e))?;
progress.downloaded_bytes.fetch_add(bytes_read as u64, std::sync::atomic::Ordering::SeqCst);
}
Ok(())
}
pub fn get_predownload(client: &Client) -> Result<Config, String> {
let selected_index_url = fetch_gist(client)?;
println!("{} Fetching download configuration...", Status::info());
let mut response = client
.get(&selected_index_url)
.timeout(Duration::from_secs(30))
.send()
.map_err(|e| format!("Network error: {}", e))?;
if !response.status().is_success() {
return Err(format!("Server error: HTTP {}", response.status()));
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let config: Value = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
response
.copy_to(&mut buffer)
.map_err(|e| format!("Error reading response bytes: {}", e))?;
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed = String::new();
gz.read_to_string(&mut decompressed)
.map_err(|e| format!("Error decompressing content: {}", e))?;
from_str(&decompressed).map_err(|e| format!("Invalid JSON: {}", e))?
} else {
from_reader(response).map_err(|e| format!("Invalid JSON: {}", e))?
};
let predownload_config = config
.get("predownload")
.and_then(|p| p.get("config"))
.ok_or("Missing predownload.config in response")?;
let base_url = predownload_config
.get("baseUrl")
.and_then(Value::as_str)
.ok_or("Missing or invalid baseUrl")?;
let index_file = predownload_config
.get("indexFile")
.and_then(Value::as_str)
.ok_or("Missing or invalid indexFile")?;
let default_config = config
.get("default")
.ok_or("Missing default config in response")?;
let cdn_list = default_config
.get("cdnList")
.and_then(Value::as_array)
.ok_or("Missing or invalid cdnList")?;
let mut cdn_urls = Vec::new();
for cdn in cdn_list {
if let Some(url) = cdn.get("url").and_then(Value::as_str) {
cdn_urls.push(url.trim_end_matches('/').to_string());
}
}
if cdn_urls.is_empty() {
return Err("No valid CDN URLs found".to_string());
}
let full_index_url = format!("{}/{}", cdn_urls[0], index_file.trim_start_matches('/'));
let zip_bases = cdn_urls
.iter()
.map(|cdn| format!("{}/{}", cdn, base_url.trim_start_matches('/')))
.collect();
Ok(Config {
index_url: full_index_url,
zip_bases,
})
}
fn should_skip_download(path: &Path, md5: Option<&str>, size: Option<u64>) -> bool {
if let (Some(md5), Some(size)) = (md5, size) {
check_existing_file(path, Some(md5), Some(size))
} else {
false
}
}
pub fn fetch_gist(client: &Client) -> Result<String, String> {
let mut response = client
.get(INDEX_URL)
.timeout(Duration::from_secs(30))
.send()
.map_err(|e| format!("Network error: {}", e))?;
if !response.status().is_success() {
return Err(format!("Server error: HTTP {}", response.status()));
}
let content_encoding = response
.headers()
.get("content-encoding")
.and_then(|v| v.to_str().ok())
.unwrap_or("");
let gist_data: Value = if content_encoding.contains("gzip") {
let mut buffer = Vec::new();
response.copy_to(&mut buffer)
.map_err(|e| format!("Error reading response: {}", e))?;
let mut gz = GzDecoder::new(&buffer[..]);
let mut decompressed = String::new();
gz.read_to_string(&mut decompressed)
.map_err(|e| format!("Error decompressing: {}", e))?;
from_str(&decompressed).map_err(|e| format!("Invalid JSON: {}", e))?
} else {
from_reader(response).map_err(|e| format!("Invalid JSON: {}", e))?
};
println!("{} Available versions:", Status::info());
println!("1. Preload - OS");
println!("2. Live - CN (Needs Update)");
println!("3. Beta - OS (Needs Update)");
println!("4. Beta - CN (Needs Update)");
loop {
print!("{} Select version: ", Status::question());
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
match input.trim() {
"1" => return get_version_url(&gist_data, "live", "os-live"),
"2" => return get_version_url(&gist_data, "live", "cn-live"),
"3" => return get_version_url(&gist_data, "beta", "os-beta"),
"4" => return get_version_url(&gist_data, "beta", "cn-beta"),
_ => println!("{} Invalid selection", Status::error()),
}
}
}
fn get_version_url(data: &Value, category: &str, version: &str) -> Result<String, String> {
data[category][version]
.as_str()
.map(|s| s.to_string())
.ok_or_else(|| format!("Missing {} URL", version))
}

1
src/network/mod.rs Normal file

@@ -0,0 +1 @@
pub mod client;