use std::fs::File;
use std::path::PathBuf;

use anyhow::Result;
use assignment_1b::image::Image;
use assignment_1b::ray::Ray;
use assignment_1b::scene::Scene;
use clap::Parser;
use rayon::prelude::{IntoParallelIterator, ParallelIterator};

/// Simple raycaster.
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct Opt {
    /// Path to the input file to use.
    #[clap()]
    input_path: PathBuf,

    /// Path to the output (defaults to the same file name as the input except
    /// with an extension of .ppm)
    #[clap(short = 'o', long = "output")]
    output_path: Option<PathBuf>,

    /// Force parallel projection to be used
    #[clap(long = "parallel")]
    force_parallel: bool,

    /// Override distance from eye
    #[clap(long = "distance", default_value = "1.0")]
    distance: f64,
}

fn main() -> Result<()> {
    let opt = Opt::parse();
    let out_file = opt
        .output_path
        .unwrap_or_else(|| opt.input_path.with_extension("ppm"));

    let mut scene = Scene::from_input_file(&opt.input_path)?;
    let distance = opt.distance;

    if opt.force_parallel {
        scene.parallel_projection = true;
    }

    // Compute the viewing window
    let view_window = scene.compute_viewing_window(distance);

    // Translate image pixels to real-world 3D coordinates
    let translate_pixel = {
        let dx = view_window.upper_right - view_window.upper_left;
        let pixel_base_x = dx / scene.image_width as f64;

        let dy = view_window.lower_left - view_window.upper_left;
        let pixel_base_y = dy / scene.image_height as f64;

        move |px: usize, py: usize| {
            let x_component = pixel_base_x * px as f64;
            let y_component = pixel_base_y * py as f64;

            // Without adding this, we would be getting the top-left corner of
            // the pixel's rectangle. We want the center, so add half of the
            // pixel size as well.
            let center_offset = (pixel_base_x + pixel_base_y) / 2.0;

            view_window.upper_left + x_component + y_component + center_offset
        }
    };

    // Generate a parallel iterator over the pixels. The iterator preserves
    // order and walks the image in row-major order.
    let pixels_iter = (0..scene.image_height)
        .into_par_iter()
        .flat_map(|y| (0..scene.image_width).into_par_iter().map(move |x| (x, y)));

    // Loop through every single pixel of the output file
    let pixels = pixels_iter
        .map(|(px, py)| {
            let pixel_in_space = translate_pixel(px, py);

            let ray_start = if scene.parallel_projection {
                // For a parallel projection, take the view direction and
                // subtract it from the target point. This means every ray is
                // cast from a point at infinity rather than from a single eye
                // position.
                let n = scene.view_dir.normalize();
                let view_dir = n * distance;
                pixel_in_space - view_dir
            } else {
                scene.eye_pos
            };

            let ray = Ray::from_endpoints(ray_start, pixel_in_space);

            let intersections = scene
                .objects
                .iter()
                .enumerate()
                .filter_map(|(i, object)| {
                    match object.kind.intersects_ray_at(&ray) {
                        Ok(Some(t)) => {
                            // Return both the t and the object, because we
                            // want to select on the t but later retrieve
                            // attributes from the object
                            Some(Ok((i, t, object)))
                        }
                        Ok(None) => None,
                        Err(e) => Some(Err(e)),
                    }
                })
                .collect::<Result<Vec<_>>>()?;

            // Find the earliest intersection, i.e. the one with the lowest
            // intersection time.
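            // `min_by_key` consumes the collected hits and yields `None` when
            // the ray missed every object, which falls through to the
            // background color below.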
            let earliest_intersection = intersections
                .into_iter()
                .min_by_key(|(_, t, _)| t.time);

            Ok(match earliest_intersection {
                // Take the object's material color
                Some((obj_idx, intersection_context, object)) => {
                    scene.compute_pixel_color(obj_idx, object.material, intersection_context)
                }

                // There was no intersection, so default to the scene's
                // background color
                None => scene.bkg_color,
            })
        })
        .collect::<Result<Vec<_>>>()?;

    // Construct and emit the image
    let image = Image {
        width: scene.image_width,
        height: scene.image_height,
        data: pixels,
    };

    {
        let file = File::create(out_file)?;
        image.write(file)?;
    }

    Ok(())
}
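
// A minimal test sketch for the CLI surface declared above, exercising only
// the clap derive (argument names and defaults), not the renderer itself. The
// binary name "raycaster" and the file names "scene.txt"/"out.ppm" are
// arbitrary placeholders.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn cli_defaults_and_flags() {
        // Only the positional input path is required; everything else falls
        // back to the defaults declared on `Opt`.
        let opt = Opt::try_parse_from(["raycaster", "scene.txt"]).unwrap();
        assert_eq!(opt.input_path, PathBuf::from("scene.txt"));
        assert!(opt.output_path.is_none());
        assert!(!opt.force_parallel);
        assert!((opt.distance - 1.0).abs() < f64::EPSILON);

        // Flags and options are parsed by the names given in the `#[clap]`
        // attributes.
        let opt = Opt::try_parse_from([
            "raycaster",
            "scene.txt",
            "-o",
            "out.ppm",
            "--parallel",
            "--distance",
            "2.5",
        ])
        .unwrap();
        assert_eq!(opt.output_path, Some(PathBuf::from("out.ppm")));
        assert!(opt.force_parallel);
        assert!((opt.distance - 2.5).abs() < f64::EPSILON);
    }
}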