use std::{
    collections::HashMap,
    io::{Write, stdout},
    path::{Path, PathBuf},
    sync::{
        Arc,
        atomic::{AtomicUsize, Ordering},
    },
};

use McError::Config;
use Ordering::SeqCst;
use futures::stream::{FuturesUnordered, StreamExt};
use reqwest::Response;
use serde::Deserialize;
use tokio::{
    fs::{File, create_dir_all, read_to_string, write},
    io::AsyncWriteExt,
    spawn,
    sync::Semaphore,
    task::{JoinError, JoinHandle},
};

use crate::{
    config::RuntimeConfig,
    errors::McError,
    minecraft::manifests::{AssetIndex, Library, LibraryArtifact, Version},
    platform::paths::{assets_directory, client_jar, library_file},
};

/// Maximum number of downloads allowed in flight at once.
const MAX_CONCURRENT_DOWNLOADS: usize = 100;
/// Starting value for the completed-jobs counter.
const PROGRESS_INITIAL_VALUE: usize = 0;
/// Base URL from which Minecraft assets are fetched by content hash.
const ASSET_URL_BASE: &str = "https://resources.download.minecraft.net/";
/// Upper bound used to clamp the displayed progress percentage.
const MAX_PROGRESS_PERCENT: f64 = 100.0;
/// Field width used when printing the progress percentage.
const PROGRESS_PRINT_WIDTH: usize = 3;
/// Memory ordering used for progress-counter loads.
const ATOMIC_ORDERING: Ordering = SeqCst;
const DOWNLOAD_COMPLETE_MESSAGE: &str = "All downloads completed successfully!";
const ASSET_INDEX_MISSING_ERROR: &str = "Missing asset_index for the version";

impl From<JoinError> for McError {
    /// Convert a panicked/cancelled task error into an `McError`.
    fn from(error: JoinError) -> Self {
        Config(format!("Task panicked: {}", error))
    }
}

/// Represents a single file to download, including the URL and destination
/// path.
#[derive(Debug, Clone)]
struct DownloadJob {
    url: String,
    destination_path: PathBuf,
}

impl DownloadJob {
    /// Whether the destination file is already present on disk.
    fn already_exists(&self) -> bool {
        self.destination_path.exists()
    }
}

/// Represents a single asset entry in the Minecraft asset index.
#[derive(Debug, Deserialize)]
struct AssetObject {
    hash: String,
}

/// The Minecraft asset index manifest.
#[derive(Debug, Deserialize)]
struct AssetIndexManifest {
    // Keys are asset paths in the index JSON; only the hashed objects are
    // consumed here, via `objects.values()`.
    objects: HashMap<String, AssetObject>,
}

/// Download all files required to run a specific Minecraft version.
pub async fn download_all_files( http_client: &reqwest::Client, runtime_config: &RuntimeConfig, version_info: &Version, ) -> Result<(), McError> { let assets_directory_path: PathBuf = ensure_assets_directories_exist(runtime_config).await?; let asset_manifest: AssetIndexManifest = load_asset_index_manifest( http_client, &assets_directory_path, version_info, ) .await?; let download_jobs: Vec = build_download_jobs( runtime_config, version_info, &assets_directory_path, &asset_manifest, ); execute_download_jobs(http_client, download_jobs).await?; println!("\n{}", DOWNLOAD_COMPLETE_MESSAGE); Ok(()) } /// Ensure the essential assets directories exist. async fn ensure_assets_directories_exist( config: &RuntimeConfig, ) -> Result { let assets_dir = assets_directory(config); create_dir_all(assets_dir.join("objects")).await?; create_dir_all(assets_dir.join("indexes")).await?; Ok(assets_dir) } /// Load the asset index manifest for the given Minecraft version. async fn load_asset_index_manifest( http_client: &reqwest::Client, assets_dir: &Path, version_info: &Version, ) -> Result { let asset_index: &AssetIndex = version_info .asset_index .as_ref() .ok_or_else(|| Config(ASSET_INDEX_MISSING_ERROR.into()))?; let index_file_path: PathBuf = assets_dir .join("indexes") .join(format!("{}.json", asset_index.id)); if !index_file_path.exists() { download_text_file(http_client, &asset_index.url, &index_file_path) .await?; } let json_string: String = read_to_string(index_file_path).await?; Ok(serde_json::from_str(&json_string)?) 
} fn build_download_jobs( config: &RuntimeConfig, version_info: &Version, assets_dir: &Path, asset_manifest: &AssetIndexManifest, ) -> Vec { let mut jobs: Vec = Vec::new(); add_client_download_job(&mut jobs, config, version_info); add_library_download_jobs(&mut jobs, config, version_info); add_asset_download_jobs(&mut jobs, assets_dir, asset_manifest); jobs } fn add_client_download_job( jobs: &mut Vec, config: &RuntimeConfig, version_info: &Version, ) { jobs.push(DownloadJob { url: version_info.downloads.client.url.clone(), destination_path: client_jar(config, &version_info.id), }); } fn add_library_download_jobs( jobs: &mut Vec, config: &RuntimeConfig, version_info: &Version, ) { for library in &version_info.libraries { add_library_artifact_job(jobs, config, library); add_library_classifier_jobs(jobs, config, library); } } fn add_library_artifact_job( jobs: &mut Vec, config: &RuntimeConfig, library: &Library, ) { let artifact: &LibraryArtifact = match &library.downloads.artifact { | Some(a) => a, | None => return, }; jobs.push(DownloadJob { url: artifact.url.clone(), destination_path: library_file(config, &artifact.path), }); } fn add_library_classifier_jobs( jobs: &mut Vec, config: &RuntimeConfig, library: &Library, ) { let classifiers: &HashMap = match &library.downloads.classifiers { | Some(values) => values, | None => return, }; for classifier_entry in classifiers.values() { jobs.push(DownloadJob { url: classifier_entry.url.clone(), destination_path: library_file(config, &classifier_entry.path), }); } } fn add_asset_download_jobs( jobs: &mut Vec, assets_dir: &Path, asset_manifest: &AssetIndexManifest, ) { for asset_object in asset_manifest.objects.values() { let prefix: &str = &asset_object.hash[0..2]; jobs.push(DownloadJob { url: format!("{}{}/{}", ASSET_URL_BASE, prefix, asset_object.hash), destination_path: assets_dir .join("objects") .join(prefix) .join(&asset_object.hash), }); } } async fn execute_download_jobs( http_client: &reqwest::Client, 
download_jobs: Vec, ) -> Result<(), McError> { let total_jobs_count: usize = download_jobs.len(); let completed_jobs_count: Arc = Arc::new(AtomicUsize::new(PROGRESS_INITIAL_VALUE)); let concurrent_download_semaphore: Arc = Arc::new(Semaphore::new(MAX_CONCURRENT_DOWNLOADS)); let mut tasks: FuturesUnordered>> = spawn_missing_download_jobs( http_client, download_jobs, &completed_jobs_count, total_jobs_count, concurrent_download_semaphore, ); await_download_tasks(&mut tasks).await } fn spawn_missing_download_jobs( http_client: &reqwest::Client, download_jobs: Vec, completed_jobs_counter: &Arc, total_jobs_count: usize, concurrent_download_semaphore: Arc, ) -> FuturesUnordered>> { let download_tasks: FuturesUnordered>> = FuturesUnordered::new(); for job in download_jobs { if job.already_exists() { completed_jobs_counter.fetch_add(1, SeqCst); print_download_progress(completed_jobs_counter, total_jobs_count); continue; } download_tasks.push(spawn_download_job( http_client.clone(), job, concurrent_download_semaphore.clone(), completed_jobs_counter.clone(), total_jobs_count, )); } download_tasks } async fn await_download_tasks( tasks: &mut FuturesUnordered>>, ) -> Result<(), McError> { while let Some(task_result) = tasks.next().await { task_result.map_err(McError::from)??; } Ok(()) } fn spawn_download_job( http_client: reqwest::Client, download_job: DownloadJob, concurrent_download_semaphore: Arc, completed_jobs_count: Arc, total_jobs_count: usize, ) -> JoinHandle> { spawn(async move { let _permit = concurrent_download_semaphore .acquire_owned() .await .unwrap(); download_file(&http_client, &download_job).await?; completed_jobs_count.fetch_add(1, SeqCst); print_download_progress(&completed_jobs_count, total_jobs_count); Ok(()) }) } /// Print the current progress of downloads to stdout. /// /// # Parameters /// - `completed_jobs_count`: Atomic counter of completed download jobs. /// - `total_jobs_count`: Total number of jobs being processed. 
fn print_download_progress( completed_jobs_count: &AtomicUsize, total_jobs_count: usize, ) { let completed_jobs: usize = completed_jobs_count.load(ATOMIC_ORDERING); let progress_percentage: f64 = ((completed_jobs as f64 / total_jobs_count as f64) * MAX_PROGRESS_PERCENT) .min(MAX_PROGRESS_PERCENT); print!( "\rDownloading game files: {:>width$.0}%", progress_percentage, width = PROGRESS_PRINT_WIDTH ); stdout().flush().unwrap(); } async fn download_text_file( http_client: &reqwest::Client, file_url: &str, destination_path: &PathBuf, ) -> Result<(), McError> { let text_content = http_client .get(file_url) .send() .await? .error_for_status()? .text() .await?; write(destination_path, text_content).await?; Ok(()) } async fn download_file( http_client: &reqwest::Client, download_job: &DownloadJob, ) -> Result<(), McError> { if let Some(parent_dir) = download_job.destination_path.parent() { create_dir_all(parent_dir).await?; } let response: Response = http_client .get(&download_job.url) .send() .await? .error_for_status()?; let mut byte_stream = response.bytes_stream(); let mut file_handle: File = File::create(&download_job.destination_path).await?; while let Some(chunk) = byte_stream.next().await { file_handle.write_all(&chunk?).await?; } Ok(()) }