mirror of https://github.com/yuhkix/wuwa-downloader.git
synced 2025-06-04 08:53:41 +00:00

kuro security bypass (insane)

parent 4d70933c4f
commit 09f3712390
5 changed files with 163 additions and 79 deletions

build.rs (4 changes)
@@ -2,7 +2,7 @@ fn main() {
     #[cfg(windows)]
     {
         let mut res = winres::WindowsResource::new();
-        res.set_icon("zani.ico");
+        res.set_icon("cartethyia.ico");
         res.compile().unwrap();
     }
 }

cartethyia.ico (new binary file, 64 KiB)
Binary file not shown.

@@ -1,15 +1,31 @@
-use std::{fs::File, io, sync::Arc, thread, time::{Duration, Instant}};
 use colored::Colorize;
 use reqwest::blocking::Client;
 use serde_json::Value;
+use std::{
+    fs::File,
+    io,
+    sync::Arc,
+    thread,
+    time::{Duration, Instant},
+};
 #[cfg(windows)]
 use winconsole::console::{clear, set_title};

-use crate::{config::{cfg::Config, status::Status}, download::progress::DownloadProgress, io::logging::log_error, network::client::download_file};
+use crate::{
+    config::{cfg::Config, status::Status},
+    download::progress::DownloadProgress,
+    io::logging::log_error,
+    network::client::download_file,
+};

 pub fn format_duration(duration: Duration) -> String {
     let secs = duration.as_secs();
-    format!("{:02}:{:02}:{:02}", secs / 3600, (secs % 3600) / 60, secs % 60)
+    format!(
+        "{:02}:{:02}:{:02}",
+        secs / 3600,
+        (secs % 3600) / 60,
+        secs % 60
+    )
 }

 pub fn bytes_to_human(bytes: u64) -> String {
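Note: a quick sanity check of the hh:mm:ss arithmetic in the reformatted format_duration above. This is a usage sketch only, not part of the commit; it assumes the function is in scope of a test module in the same file.

    // Usage sketch (not part of the diff): 3723 s = 1 h + 2 min + 3 s.
    #[cfg(test)]
    mod format_duration_sketch {
        use super::format_duration;
        use std::time::Duration;

        #[test]
        fn formats_hours_minutes_seconds() {
            assert_eq!(format_duration(Duration::from_secs(3723)), "01:02:03");
            assert_eq!(format_duration(Duration::from_secs(59)), "00:00:59");
        }
    }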
@@ -26,12 +42,12 @@ pub fn calculate_total_size(resources: &[Value], client: &Client, config: &Config
     let mut failed_urls = 0;

     println!("{} Processing files...", Status::info());

     for (i, item) in resources.iter().enumerate() {
         if let Some(dest) = item.get("dest").and_then(Value::as_str) {
             let mut file_size = 0;
             let mut found_valid_url = false;

             for base_url in &config.zip_bases {
                 let url = format!("{}/{}", base_url, dest);
                 match client.head(&url).send() {
@@ -56,7 +72,11 @@ pub fn calculate_total_size(resources: &[Value], client: &Client, config: &Config
                 total_size += file_size;
             } else {
                 failed_urls += 1;
-                println!("{} Could not determine size for file: {}", Status::error(), dest);
+                println!(
+                    "{} Could not determine size for file: {}",
+                    Status::error(),
+                    dest
+                );
             }
         }

@@ -99,14 +119,16 @@ pub fn exit_with_error(log_file: &File, error: &str) -> ! {

     #[cfg(windows)]
     clear().unwrap();

     println!("{} {}", Status::error(), error);
     println!("\n{} Press Enter to exit...", Status::warning());
     let _ = io::stdin().read_line(&mut String::new());
     std::process::exit(1);
 }

-pub fn track_progress(total_size: u64) -> (
+pub fn track_progress(
+    total_size: u64,
+) -> (
     Arc<std::sync::atomic::AtomicBool>,
     Arc<std::sync::atomic::AtomicUsize>,
     DownloadProgress,
@@ -133,11 +155,19 @@ pub fn start_title_thread(
         while !should_stop.load(std::sync::atomic::Ordering::SeqCst) {
             let elapsed = progress.start_time.elapsed();
             let elapsed_secs = elapsed.as_secs();
-            let downloaded_bytes = progress.downloaded_bytes.load(std::sync::atomic::Ordering::SeqCst);
-            let total_bytes = progress.total_bytes.load(std::sync::atomic::Ordering::SeqCst);
+            let downloaded_bytes = progress
+                .downloaded_bytes
+                .load(std::sync::atomic::Ordering::SeqCst);
+            let total_bytes = progress
+                .total_bytes
+                .load(std::sync::atomic::Ordering::SeqCst);
             let current_success = success.load(std::sync::atomic::Ordering::SeqCst);

-            let speed = if elapsed_secs > 0 { downloaded_bytes / elapsed_secs } else { 0 };
+            let speed = if elapsed_secs > 0 {
+                downloaded_bytes / elapsed_secs
+            } else {
+                0
+            };
             let (speed_value, speed_unit) = if speed > 1_000_000 {
                 (speed / 1_000_000, "MB/s")
             } else {
@@ -146,7 +176,11 @@ pub fn start_title_thread(

             let remaining_files = total_files - current_success;
             let remaining_bytes = total_bytes.saturating_sub(downloaded_bytes);
-            let eta_secs = if speed > 0 && remaining_files > 0 { remaining_bytes / speed } else { 0 };
+            let eta_secs = if speed > 0 && remaining_files > 0 {
+                remaining_bytes / speed
+            } else {
+                0
+            };
             let eta_str = format_duration(Duration::from_secs(eta_secs));

             let progress_percent = if total_bytes > 0 {
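Note: with illustrative numbers (not taken from any real download), the speed and ETA arithmetic in the two hunks above works out as in this sketch; the real loop additionally requires remaining_files > 0 before computing an ETA.

    // Illustrative sketch of the title-thread arithmetic; all values are made up.
    fn main() {
        let downloaded_bytes: u64 = 50_000_000; // 50 MB received so far
        let elapsed_secs: u64 = 10;
        let total_bytes: u64 = 550_000_000;

        let speed = if elapsed_secs > 0 { downloaded_bytes / elapsed_secs } else { 0 };
        assert_eq!(speed, 5_000_000); // > 1_000_000, so displayed as 5 MB/s

        let remaining_bytes = total_bytes.saturating_sub(downloaded_bytes);
        let eta_secs = if speed > 0 { remaining_bytes / speed } else { 0 };
        assert_eq!(eta_secs, 100); // format_duration renders this as "00:01:40"
    }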
@@ -156,7 +190,7 @@ pub fn start_title_thread(
             };

             let title = format!(
-                "Wuthering Waves Downloader - {}/{} files - Current File: {}{} - Speed: {}{} - Total ETA: {}",
+                "Wuthering Waves Downloader - {}/{} files - Total Downloaded: {}{} - Speed: {}{} - Total ETA: {}",
                 current_success,
                 total_files,
                 bytes_to_human(downloaded_bytes),
@@ -165,10 +199,10 @@ pub fn start_title_thread(
                 speed_unit,
                 eta_str
             );

             #[cfg(windows)]
             set_title(&title).unwrap();

             thread::sleep(Duration::from_secs(1));
         }
     })
@@ -180,7 +214,7 @@ pub fn setup_ctrlc(should_stop: Arc<std::sync::atomic::AtomicBool>) {

         #[cfg(windows)]
         clear().unwrap();

         println!("\n{} Download interrupted by user", Status::warning());
     })
     .unwrap();
@@ -217,4 +251,4 @@ pub fn download_resources(
             }
         }
     }
 }

@@ -1,16 +1,21 @@
 use colored::Colorize;
 use flate2::read::GzDecoder;
 use reqwest::blocking::Client;
-use serde_json::{from_reader, from_str, Value};
-use std::{io::{Read, Write}, fs, io, path::Path, time::Duration};
+use serde_json::{Value, from_reader, from_str};
+use std::{
+    fs, io,
+    io::{Read, Write},
+    path::Path,
+    time::Duration,
+};
 #[cfg(windows)]
 use winconsole::console::clear;

 use crate::config::cfg::Config;
+use crate::config::status::Status;
 use crate::download::progress::DownloadProgress;
 use crate::io::file::{calculate_md5, check_existing_file, get_filename};
 use crate::io::{logging::log_error, util::get_version};
-use crate::config::status::Status;

 const INDEX_URL: &str = "https://gist.githubusercontent.com/yuhkix/b8796681ac2cd3bab11b7e8cdc022254/raw/4435fd290c07f7f766a6d2ab09ed3096d83b02e3/wuwa.json";
 const MAX_RETRIES: usize = 3;
@@ -45,7 +50,7 @@ pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {

         #[cfg(windows)]
         clear().unwrap();

         println!("{} {}", Status::error(), msg);
         println!("\n{} Press Enter to exit...", Status::warning());
         let _ = io::stdin().read_line(&mut String::new());
@@ -65,7 +70,7 @@ pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {

         #[cfg(windows)]
         clear().unwrap();

         println!("{} Error reading index file: {}", Status::error(), e);
         println!("\n{} Press Enter to exit...", Status::warning());
         let _ = io::stdin().read_line(&mut String::new());
@@ -76,10 +81,10 @@ pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {
     let mut decompressed_text = String::new();
     if let Err(e) = gz.read_to_string(&mut decompressed_text) {
         log_error(log_file, &format!("Error decompressing index file: {}", e));

         #[cfg(windows)]
         clear().unwrap();

         println!("{} Error decompressing index file: {}", Status::error(), e);
         println!("\n{} Press Enter to exit...", Status::warning());
         let _ = io::stdin().read_line(&mut String::new());
@@ -97,7 +102,7 @@ pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {

         #[cfg(windows)]
         clear().unwrap();

         println!("{} Error reading index file: {}", Status::error(), e);
         println!("\n{} Press Enter to exit...", Status::warning());
         let _ = io::stdin().read_line(&mut String::new());
@@ -112,10 +117,10 @@ pub fn fetch_index(client: &Client, config: &Config, log_file: &fs::File) -> Value {
         Ok(v) => v,
         Err(e) => {
             log_error(log_file, &format!("Error parsing index file JSON: {}", e));

             #[cfg(windows)]
             clear().unwrap();

             println!("{} Error parsing index file: {}", Status::error(), e);
             println!("\n{} Press Enter to exit...", Status::warning());
             let _ = io::stdin().read_line(&mut String::new());
@@ -152,13 +157,16 @@ pub fn download_file(
         let url = format!("{}{}", base_url, dest);

         if let Ok(head_response) = client.head(&url).timeout(Duration::from_secs(10)).send() {
-            if let Some(size) = head_response.headers()
+            if let Some(size) = head_response
+                .headers()
                 .get("content-length")
                 .and_then(|v| v.to_str().ok())
                 .and_then(|s| s.parse::<u64>().ok())
             {
                 file_size = Some(size);
-                progress.total_bytes.fetch_add(size, std::sync::atomic::Ordering::SeqCst);
+                progress
+                    .total_bytes
+                    .fetch_add(size, std::sync::atomic::Ordering::SeqCst);
                 break;
             }
         }
@@ -166,7 +174,11 @@ pub fn download_file(

     if let (Some(md5), Some(size)) = (expected_md5, file_size) {
         if should_skip_download(&path, Some(md5), Some(size)) {
-            println!("{} File is valid: {}", Status::matched(), filename.bright_purple());
+            println!(
+                "{} File is valid: {}",
+                Status::matched(),
+                filename.bright_purple()
+            );
             return true;
         }
     }
@@ -185,23 +197,36 @@ pub fn download_file(
         let head_response = match client.head(&url).timeout(Duration::from_secs(10)).send() {
             Ok(resp) if resp.status().is_success() => resp,
             Ok(resp) => {
-                log_error(log_file, &format!("CDN {} failed for {} (HTTP {})", i+1, dest, resp.status()));
+                log_error(
+                    log_file,
+                    &format!("CDN {} failed for {} (HTTP {})", i + 1, dest, resp.status()),
+                );
                 continue;
-            },
+            }
             Err(e) => {
-                log_error(log_file, &format!("CDN {} failed for {}: {}", i+1, dest, e));
+                log_error(
+                    log_file,
+                    &format!("CDN {} failed for {}: {}", i + 1, dest, e),
+                );
                 continue;
             }
         };

-        let expected_size = file_size.or_else(|| head_response.headers()
-            .get("content-length")
-            .and_then(|v| v.to_str().ok())
-            .and_then(|s| s.parse::<u64>().ok()));
+        let expected_size = file_size.or_else(|| {
+            head_response
+                .headers()
+                .get("content-length")
+                .and_then(|v| v.to_str().ok())
+                .and_then(|s| s.parse::<u64>().ok())
+        });

         if let (Some(md5), Some(size)) = (expected_md5, expected_size) {
             if check_existing_file(&path, Some(md5), Some(size)) {
-                println!("{} File is valid: {}", Status::matched(), filename.bright_purple());
+                println!(
+                    "{} File is valid: {}",
+                    Status::matched(),
+                    filename.bright_purple()
+                );
                 return true;
             }
         }
@@ -213,7 +238,7 @@ pub fn download_file(

     while retries > 0 {
         let result = download_single_file(&client, &url, &path, should_stop, progress);

         match result {
             Ok(_) => break,
             Err(e) => {
@@ -224,10 +249,14 @@ pub fn download_file(
                 last_error = Some(e);
                 retries -= 1;
                 let _ = fs::remove_file(&path);

                 if retries > 0 {
-                    println!("{} Retrying {}... ({} left)",
-                        Status::warning(), filename.yellow(), retries);
+                    println!(
+                        "{} Retrying {}... ({} left)",
+                        Status::warning(),
+                        filename.yellow(),
+                        retries
+                    );
                 }
             }
         }
@@ -238,8 +267,14 @@ pub fn download_file(
     }

     if retries == 0 {
-        log_error(log_file, &format!("Failed after retries for {}: {}", dest,
-            last_error.unwrap_or_default()));
+        log_error(
+            log_file,
+            &format!(
+                "Failed after retries for {}: {}",
+                dest,
+                last_error.unwrap_or_default()
+            ),
+        );
         println!("{} Failed: {}", Status::error(), filename.red());
         return false;
     }
@@ -248,11 +283,16 @@ pub fn download_file(
         if should_stop.load(std::sync::atomic::Ordering::SeqCst) {
             return false;
         }

         let actual = calculate_md5(&path);
         if actual != expected {
-            log_error(log_file, &format!("Checksum failed for {}: expected {}, got {}",
-                dest, expected, actual));
+            log_error(
+                log_file,
+                &format!(
+                    "Checksum failed for {}: expected {}, got {}",
+                    dest, expected, actual
+                ),
+            );
             fs::remove_file(&path).unwrap();
             println!("{} Checksum failed: {}", Status::error(), filename.red());
             return false;
@@ -285,26 +325,28 @@ fn download_single_file(
         return Err(format!("HTTP error: {}", response.status()));
     }

-    let mut file = fs::File::create(path)
-        .map_err(|e| format!("File error: {}", e))?;
+    let mut file = fs::File::create(path).map_err(|e| format!("File error: {}", e))?;

     let mut buffer = [0; BUFFER_SIZE];
     loop {
         if should_stop.load(std::sync::atomic::Ordering::SeqCst) {
             return Err("Download interrupted".into());
         }

-        let bytes_read = response.read(&mut buffer)
+        let bytes_read = response
+            .read(&mut buffer)
             .map_err(|e| format!("Read error: {}", e))?;

         if bytes_read == 0 {
             break;
         }

         file.write_all(&buffer[..bytes_read])
             .map_err(|e| format!("Write error: {}", e))?;

-        progress.downloaded_bytes.fetch_add(bytes_read as u64, std::sync::atomic::Ordering::SeqCst);
+        progress
+            .downloaded_bytes
+            .fetch_add(bytes_read as u64, std::sync::atomic::Ordering::SeqCst);
     }

     Ok(())
@@ -312,7 +354,7 @@ fn download_single_file(

 pub fn get_config(client: &Client) -> Result<Config, String> {
     let selected_index_url = fetch_gist(client)?;

     #[cfg(windows)]
     clear().unwrap();

@@ -362,24 +404,31 @@ pub fn get_config(client: &Client) -> Result<Config, String> {
             println!("{} Using predownload.config", Status::info());
             "predownload"
         }
-        (true, true) => {
-            loop {
-                print!("{} Choose config to use (1=default, 2=predownload): ", Status::question());
-                io::stdout().flush().map_err(|e| format!("Failed to flush stdout: {}", e))?;
-
-                let mut input = String::new();
-                io::stdin()
-                    .read_line(&mut input)
-                    .map_err(|e| format!("Failed to read input: {}", e))?;
-
-                match input.trim() {
-                    "1" => break "default",
-                    "2" => break "predownload",
-                    _ => println!("{} Invalid choice, please enter 1 or 2", Status::error()),
-                }
-            }
-        }
-        (false, false) => return Err("Neither default.config nor predownload.config found in response".to_string()),
+        (true, true) => loop {
+            print!(
+                "{} Choose config to use (1=default, 2=predownload): ",
+                Status::question()
+            );
+            io::stdout()
+                .flush()
+                .map_err(|e| format!("Failed to flush stdout: {}", e))?;
+
+            let mut input = String::new();
+            io::stdin()
+                .read_line(&mut input)
+                .map_err(|e| format!("Failed to read input: {}", e))?;
+
+            match input.trim() {
+                "1" => break "default",
+                "2" => break "predownload",
+                _ => println!("{} Invalid choice, please enter 1 or 2", Status::error()),
+            }
+        },
+        (false, false) => {
+            return Err(
+                "Neither default.config nor predownload.config found in response".to_string(),
+            );
+        }
     };

     let config_data = config
@@ -416,10 +465,10 @@ pub fn get_config(client: &Client) -> Result<Config, String> {
         return Err("No valid CDN URLs found".to_string());
     }

-    let full_index_url = format!("{}/{}", cdn_urls[0], index_file.trim_start_matches('/'));
+    let full_index_url = format!("{}//{}", cdn_urls[0], index_file.trim_start_matches('/'));
     let zip_bases = cdn_urls
         .iter()
-        .map(|cdn| format!("{}/{}", cdn, base_url.trim_start_matches('/')))
+        .map(|cdn| format!("{}//{}", cdn, base_url.trim_start_matches('/')))
         .collect();

     Ok(Config {
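Note: the two "{}//{}" format strings above are the visible functional change to how CDN URLs are built, and are presumably what the commit title's "kuro security bypass" refers to: the index URL and every zip base URL are now assembled with a doubled slash between the CDN host and the resource path. A minimal sketch of the resulting URL shape, using hypothetical host and path values rather than the real config:

    // Sketch only: shows the URL shape produced by the "{}//{}" format strings.
    // The host and path below are hypothetical, not values from the real index.
    fn main() {
        let cdn = "https://cdn.example.com"; // hypothetical CDN base
        let base_url = "/client/resource";   // hypothetical resource base path

        let old_style = format!("{}/{}", cdn, base_url.trim_start_matches('/'));
        let new_style = format!("{}//{}", cdn, base_url.trim_start_matches('/'));

        assert_eq!(old_style, "https://cdn.example.com/client/resource");
        assert_eq!(new_style, "https://cdn.example.com//client/resource");
    }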
@@ -455,14 +504,15 @@ pub fn fetch_gist(client: &Client) -> Result<String, String> {

     let gist_data: Value = if content_encoding.contains("gzip") {
         let mut buffer = Vec::new();
-        response.copy_to(&mut buffer)
+        response
+            .copy_to(&mut buffer)
             .map_err(|e| format!("Error reading response: {}", e))?;

         let mut gz = GzDecoder::new(&buffer[..]);
         let mut decompressed = String::new();
         gz.read_to_string(&mut decompressed)
             .map_err(|e| format!("Error decompressing: {}", e))?;

         from_str(&decompressed).map_err(|e| format!("Invalid JSON: {}", e))?
     } else {
         from_reader(response).map_err(|e| format!("Invalid JSON: {}", e))?

zani.ico (binary file, 42 KiB)
Binary file not shown.