csci5607/assignment-1b/src/main.rs

#[macro_use]
extern crate anyhow;
#[macro_use]
extern crate derivative;
mod image;
mod ray;
mod scene;
mod utils;
use std::fs::File;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use scene::Scene;
use crate::image::Image;
use crate::ray::Ray;
/// Simple raycaster.
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct Opt {
    /// Path to the input file to use.
    #[clap()]
    input_path: PathBuf,

    /// Path to the output file (defaults to the same file name as the input,
    /// except with an extension of .ppm).
    #[clap(short = 'o', long = "output")]
    output_path: Option<PathBuf>,

    /// Force parallel projection to be used.
    #[clap(long = "parallel")]
    force_parallel: bool,

    /// Override the distance from the eye to the viewing window.
    #[clap(long = "distance", default_value = "1.0")]
    distance: f64,
}
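
// Example invocation (a sketch; `scene.txt` is a hypothetical input file and
// the binary is run through Cargo rather than by name):
//
//     cargo run --release -- scene.txt -o render.ppm --parallel --distance 2.0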
fn main() -> Result<()> {
    let opt = Opt::parse();
    let out_file = opt
        .output_path
        .unwrap_or_else(|| opt.input_path.with_extension("ppm"));
    let mut scene = Scene::from_input_file(&opt.input_path)?;
    let distance = opt.distance;
    if opt.force_parallel {
        scene.parallel_projection = true;
    }
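
    // The viewing window is the rectangle in world space that the image maps
    // onto. `compute_viewing_window` is assumed (based on how it is used here)
    // to derive the window's corners from the scene's eye position, view
    // direction, and the given eye-to-window distance.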
    // Compute the viewing window
    let view_window = scene.compute_viewing_window(distance);

    // Translate image pixels to real-world 3D coordinates
    let translate_pixel = {
        // Step vectors for moving one pixel to the right and one pixel down
        // within the viewing window.
        let dx = view_window.upper_right - view_window.upper_left;
        let pixel_base_x = dx / scene.image_width as f64;
        let dy = view_window.lower_left - view_window.upper_left;
        let pixel_base_y = dy / scene.image_height as f64;

        move |px: usize, py: usize| {
            let x_component = pixel_base_x * px as f64;
            let y_component = pixel_base_y * py as f64;

            // Without adding this, we would get the top-left corner of the
            // pixel's rectangle. We want the center, so add half of the pixel
            // size as well.
            let center_offset = (pixel_base_x + pixel_base_y) / 2.0;

            view_window.upper_left + x_component + y_component + center_offset
        }
    };
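
    // As a concrete check: `translate_pixel(0, 0)` evaluates to
    // `upper_left + (pixel_base_x + pixel_base_y) / 2.0`, the center of the
    // top-left pixel of the viewing window.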
    // Generate an iterator over pixel coordinates in row-major order. The
    // commented-out `.into_par_iter()` calls mark where a parallel iterator
    // could be swapped in.
    let pixels_iter = (0..scene.image_height)
        // .into_par_iter()
        .flat_map(|y| {
            (0..scene.image_width)
                // .into_par_iter()
                .map(move |x| (x, y))
        });
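
    // To actually parallelize this, rayon would need to be a dependency (an
    // assumption; it is not imported in this file). Restoring the
    // `.into_par_iter()` calls should then work with the same final `collect`,
    // since rayon's parallel iterators can also collect into `Result<Vec<_>>`.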
    // Loop through every single pixel of the output file
    let pixels = pixels_iter
        .map(|(px, py)| {
            let pixel_in_space = translate_pixel(px, py);

            let ray_start = if scene.parallel_projection {
                // For a parallel projection, step back from the target point
                // along the view direction. Every ray then travels in the same
                // direction, as if viewed from a point at infinity rather than
                // from a single eye position.
                let n = scene.view_dir.normalize();
                let view_dir = n * distance;
                pixel_in_space - view_dir
            } else {
                scene.eye_pos
            };
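
            // In the perspective case, every ray shares the eye as its origin,
            // so rays diverge and more distant objects cover fewer pixels.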
            let ray = Ray::from_endpoints(ray_start, pixel_in_space);

            let intersections = scene
                .objects
                .iter()
                .filter_map(|object| {
                    match object.kind.intersects_ray_at(&ray) {
                        Ok(Some(t)) => {
                            // Return both the t and the object, because we want
                            // to pick the earliest hit by t but later retrieve
                            // material attributes from the object.
                            Some(Ok((t, object)))
                        }
                        Ok(None) => None,
                        Err(e) => Some(Err(e)),
                    }
                })
                .collect::<Result<Vec<_>>>()?;
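
            // Collecting into `Result<Vec<_>>` stops at the first intersection
            // error, and `?` propagates it out of this per-pixel closure.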
            // Pick the earliest intersection, i.e. the one with the lowest t.
            let earliest_intersection =
                intersections.into_iter().min_by_key(|(t, _)| t.time);

            Ok(match earliest_intersection {
                // Take the object's material color
                Some((intersection_context, object)) => {
                    scene.compute_pixel_color(object.material, intersection_context)
                }
                // There was no intersection, so this should default to the
                // scene's background color.
                None => scene.bkg_color,
            })
        })
        .collect::<Result<Vec<_>>>()?;
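
    // `pixels` now holds one color per pixel in the same row-major order as the
    // iterator above, which is presumably the layout `Image` expects for `data`.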
    // Construct and emit image
    let image = Image {
        width: scene.image_width,
        height: scene.image_height,
        data: pixels,
    };
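
    // `Image::write` is assumed to serialize the buffer in PPM form, matching
    // the default `.ppm` output extension chosen above.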
    {
        let file = File::create(out_file)?;
        image.write(file)?;
    }

    Ok(())
}