arhivach-downloader

Download arhivach.vc threads
git clone https://git.ea.contact/arhivach-downloader
Log | Files | Refs | README

commit 3cef4a814f8bf5d97f2c38923c6fea4e06cd6aaf
parent 8103a2a0a87a36965f4b58ec91bb3619b9c9c7a5
Author: egor-achkasov <eaachkasov@gmail.com>
Date:   Mon, 23 Feb 2026 09:01:19 +0000

Remove tokio and all async

Diffstat:
M Cargo.lock    | 13 -------------
M Cargo.toml    |  1 -
M src/export.rs | 35 +++++++++++++++--------------------
M src/main.rs   | 13 ++++++-------
4 files changed, 21 insertions(+), 41 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
@@ -76,7 +76,6 @@ dependencies = [
  "regex",
  "reqwest",
  "scraper",
- "tokio",
 ]
 
 [[package]]
@@ -1553,22 +1552,10 @@ dependencies = [
  "mio",
  "pin-project-lite",
  "socket2",
- "tokio-macros",
  "windows-sys 0.61.2",
 ]
 
 [[package]]
-name = "tokio-macros"
-version = "2.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
 name = "tokio-native-tls"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
@@ -10,4 +10,3 @@ clap = { version = "4.5.57", features = ["derive"] }
 regex = "1.12.3"
 reqwest = { version = "0.12", features = ["blocking"] }
 scraper = "0.25.0"
-tokio = { version = "1.49.0", features = ["macros", "rt", "rt-multi-thread"] }
diff --git a/src/export.rs b/src/export.rs
@@ -44,7 +44,7 @@ fn render_text_to_html(text: &str) -> String {
 /// If download_thumbnails is true, downloads thumbnails to ./{thread_id}/thumb
 ///
 /// WARNING: If the directory already exists, it will be overwritten
-pub async fn export2html(
+pub fn export2html(
     posts: Vec<Post>,
     download_files: bool,
     download_thumbnails: bool,
@@ -63,11 +63,11 @@ pub async fn export2html(
         .join("\n");
 
     if download_files {
-        download_assets(&posts, &format!("{}/files", dir), "files", |f| &f.url).await?;
+        download_assets(&posts, &format!("{}/files", dir), "files", |f| &f.url)?;
     }
 
     if download_thumbnails {
-        download_assets(&posts, &format!("{}/thumb", dir), "thumbnails", |f| &f.url_thumb).await?;
+        download_assets(&posts, &format!("{}/thumb", dir), "thumbnails", |f| &f.url_thumb)?;
     }
 
     let template = std::fs::read_to_string("template.html")?
@@ -171,7 +171,7 @@ fn render_images(
 
 }
 
-async fn download_assets(
+fn download_assets(
     posts: &[Post],
     dest_dir: &str,
     label: &str,
@@ -189,20 +189,15 @@ async fn download_assets(
             let url = url_of(f);
             let filename = url.split('/').last().unwrap_or("");
             let path = format!("{}/{}", dest_dir, filename);
-            let mut failed = false;
-            for attempt in 0..3 {
-                match download(url, &path).await {
-                    Ok(()) => { failed = false; break; }
-                    Err(e) => {
-                        failed = true;
-                        println!("\r\tFailed to download {} {}: {}\n\t-> Waiting 3 seconds...", label, filename, e);
-                        if attempt < 2 {
-                            tokio::time::sleep(std::time::Duration::from_secs(3)).await;
-                        }
-                    }
-                }
+            let mut result = Err(anyhow::anyhow!("no attempts"));
+            for _ in 0..3 {
+                result = download(url, &path);
+                if result.is_ok() { break; }
+                let e = result.as_ref().unwrap_err();
+                println!("\r\tFailed to download {} {}: {}\n\t-> Waiting 3 seconds...", label, filename, e);
+                std::thread::sleep(std::time::Duration::from_secs(3));
             }
-            if failed {
+            if result.is_err() {
                 println!("\tSkipping {} {} after 3 failed attempts.", label, filename);
             }
         }
@@ -213,10 +208,10 @@ async fn download_assets(
     Ok(())
 }
 
-async fn download(url: &str, path: &str) -> Result<()> {
-    let bytes = reqwest::get(url).await
+fn download(url: &str, path: &str) -> Result<()> {
+    let bytes = reqwest::blocking::get(url)
         .with_context(|| format!("HTTP GET failed for {}", url))?
-        .bytes().await
+        .bytes()
         .context("failed to read response body")?;
     std::fs::write(path, &bytes)
         .with_context(|| format!("failed to write {}", path))?;
diff --git a/src/main.rs b/src/main.rs
@@ -8,16 +8,16 @@ use post::Post;
 
 use anyhow::{Context, Ok, Result};
 
-async fn scrape_thread(url: &str, config: &Config) -> Result<()> {
+fn scrape_thread(url: &str, config: &Config) -> Result<()> {
     use std::io::Write;
     let t_total = std::time::Instant::now();
 
     print!("\tGetting thread...");
     std::io::stdout().flush().ok();
     let t = std::time::Instant::now();
-    let html = reqwest::get(url).await
+    let html = reqwest::blocking::get(url)
         .with_context(|| format!("HTTP GET failed for {url}"))?
-        .text().await
+        .text()
         .context("failed to read response body")?;
     println!(" Done ({} ms)", t.elapsed().as_millis());
 
@@ -28,7 +28,7 @@ async fn scrape_thread(url: &str, config: &Config) -> Result<()> {
         .context("failed to parse thread HTML")?;
     println!(" Done ({} ms)", t.elapsed().as_millis());
 
-    export::export2html(posts, config.files, config.thumb).await
+    export::export2html(posts, config.files, config.thumb)
         .context("failed to export thread")?;
 
     println!("Done processing {} ({} ms)", url, t_total.elapsed().as_millis());
@@ -36,8 +36,7 @@
 
 }
 
-#[tokio::main]
-async fn main() -> Result<()> {
+fn main() -> Result<()> {
     let config = parse_args()
         .unwrap_or_else(|e| {
             eprintln!("Error: {}", e);
@@ -46,7 +45,7 @@
 
     for url in &config.urls {
         println!("Processing {}:", url);
-        scrape_thread(url, &config).await
+        scrape_thread(url, &config)
            .unwrap_or_else(|e| eprintln!("Error processing {}: {:#}", url, e));
    }