days 9 10 11 and 12

Shoofle 2024-12-12 10:40:37 -05:00
parent bd93ac4c3d
commit 14c1e412eb
8 changed files with 815 additions and 0 deletions

day09/Cargo.toml Normal file

@@ -0,0 +1,6 @@
[package]
name = "day09"
version = "0.1.0"
edition = "2021"
[dependencies]

day09/src/main.rs Normal file

@@ -0,0 +1,345 @@
use std::collections::HashMap;
use std::fs;
use std::env;
#[derive(Clone, Copy, Debug)]
enum Record {
Gap { start: i32, length: i32},
File { start: i32, length: i32, file: i32 }
}
struct Drive {
in_order: Vec<Record>,
files: HashMap<i32, Record>,
}
fn main() {
println!("Hello, AoC day 09!");
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("wrong number of arguments!");
std::process::exit(1);
}
let file_path = &args[1];
let contents = fs::read_to_string(file_path).expect("Should have been able to read the file");
let mut drive = read_drive(&contents);
print_drive(&drive);
compress(&mut drive);
print_drive(&drive);
let sum = checksum(&drive);
println!("checksum post-compaction is {sum}.");
let mut drive = read_drive_enum(&contents);
print_enums(&drive);
drive = compress_enums(drive);
print_enums(&drive);
let sum = checksum_enum(&drive);
println!("checksum post-compression is {sum}")
}
fn read_drive(contents: &str) -> Vec<i32> {
let mut files: i32 = 0;
let mut empty: i32 = 0;
let mut is_file = true;
for character in contents.chars() {
let maybe = character.to_string().parse::<i32>();
if maybe.is_err() {
continue;
}
let num = maybe.unwrap();
if is_file {
files += num;
} else {
empty += num;
}
is_file = !is_file;
}
let mut drive: Vec<i32> = vec![-1; (empty+files).try_into().unwrap()];
let mut file_number: i32 = 0;
let mut index: usize = 0;
let mut is_file = true;
for character in contents.chars() {
let maybe = character.to_string().parse::<u32>();
if maybe.is_err() {
continue;
}
let num = maybe.unwrap();
if is_file {
for _i in 0..num {
drive[index] = file_number;
index += 1;
}
file_number += 1;
} else {
index += num as usize;
}
is_file = !is_file;
}
return drive;
}
fn print_drive(drive: &[i32]) {
for &x in drive {
if x == -1 {
print!(".");
} else {
print!("{}", x);
}
}
print!("\n");
}
fn checksum(drive: &[i32]) -> i64 {
let mut sum: i64 = 0;
for index in 0..drive.len() {
if drive[index] == -1 { continue; }
sum += (drive[index] as i64) * (index as i64);
}
return sum;
}
fn compress(drive: &mut Vec<i32>) {
let mut write_index: usize = 0;
for read_index in (0..drive.len()).rev() {
if write_index == drive.len() { break; }
let file = drive[read_index];
if file == -1 { continue; }
while drive[write_index] != -1 {
write_index += 1;
}
if write_index >= read_index { break; }
drive[write_index] = file;
drive[read_index] = -1;
write_index += 1;
}
}
fn read_drive_enum(contents: &str) -> Drive {
let mut drive: Vec<Record> = Vec::with_capacity(contents.len());
let mut files: HashMap<i32, Record> = HashMap::new();
let mut is_file = true;
let mut file_number = 0;
let mut index = 0;
for c in contents.trim().chars() {
let num = c.to_string().parse::<i32>().unwrap();
if is_file {
let file = Record::File {
start: index,
length: num,
file: file_number,
};
drive.push(file);
files.insert(file_number, file);
file_number += 1;
} else {
drive.push(Record::Gap { start: index, length: num });
}
index += num;
is_file = !is_file;
}
return Drive { in_order: drive, files: files };
}
fn print_enums(drive: &Drive) {
for x in drive.in_order.iter() {
match x {
Record::Gap { start: _, length } => {
for _ in 0..*length {
print!(".");
}
},
Record::File { start: _, length, file } => {
for _ in 0..*length {
print!("{}", file);
}
}
}
}
print!("\n");
}
fn compress_enums(mut drive: Drive) -> Drive {
// for each file in backwards order,
for file_number in (0..(drive.files.len() as i32)).rev() {
let record = drive.files.get(&file_number);
println!("looking at {}: {:?}", file_number, record);
// index into the in_order vec
let file_index = find_order_index(&drive, file_number);
match record {
// we will never find a gap in the files hashmap
Some(Record::Gap {start: _, length: _}) => (),
// if we found none then it's probably an off-by-one at the edges
None => (),
Some(Record::File {start: file_start, length: file_length, file: _ }) => {
// find the first gap where the object can live,
for gap_index in 0..drive.in_order.len() {
// if we're looking past the file, then we're done!
if gap_index > file_index { break; }
match drive.in_order[gap_index] {
// if we're looking at a file, skip it. can't put our file into an occupied spot.
Record::File { .. } => { continue; },
// if we're looking at a gap, proceed!
Record::Gap { start: gap_start, length: gap_length } => {
// found a gap that fits just right
if gap_length == *file_length {
// remove the gap
drive.in_order.remove(gap_index);
// insert the file into the gap
let new_file = Record::File {
start: gap_start,
length: *file_length,
file: file_number
};
drive.in_order.insert(gap_index, new_file);
// remove the file
drive.in_order.remove(file_index);
// insert the gap where the file was
let new_gap = Record::Gap {
start: *file_start,
length: gap_length
};
drive.in_order.insert(file_index, new_gap);
// update the files hashmap
drive.files.insert(file_number, new_file);
break;
}
// the gap is bigger than the file
if gap_length > *file_length {
let mut offset: i32 = 0;
// remove the gap
drive.in_order.remove(gap_index);
offset -= 1;
// insert the file into the gap
let new_file = Record::File {
start: gap_start,
length: *file_length,
file: file_number
};
drive.in_order.insert(gap_index, new_file);
offset += 1;
// fill the gap
if let Record::Gap {
start: _next_gap_start,
length: next_gap_length
} = drive.in_order[gap_index + 1] {
// if the next record is a gap, merge them
drive.in_order[gap_index + 1] = Record::Gap {
start:gap_start + *file_length,
length: gap_length - file_length + next_gap_length
};
} else {
// if the next record is a file, add a gap.
let new_gap_fill = Record::Gap {
start: gap_start + *file_length,
length: gap_length - file_length
};
drive.in_order.insert(gap_index + 1, new_gap_fill);
offset += 1;
}
let offset_index = (file_index as i32 + offset) as usize;
// remove the file
drive.in_order.remove(offset_index);
// insert the gap where the file was
let new_gap = Record::Gap {
start: *file_start,
length: *file_length
};
drive.in_order.insert(offset_index, new_gap);
// if the new gap has a gap after it, merge them
if offset_index < drive.in_order.len()-1 {
if let Record::Gap {
start: _next_gap_start,
length: next_gap_length
} = drive.in_order[offset_index + 1] {
drive.in_order.remove(offset_index + 1);
drive.in_order[offset_index] = Record::Gap {
start: *file_start,
length: *file_length + next_gap_length
};
}
}
// if the new gap has a gap before it, merge them
if offset_index > 0 {
if let Record::Gap {
start: prev_gap_start,
length: prev_gap_length
} = drive.in_order[offset_index - 1] {
drive.in_order.remove(offset_index);
drive.in_order[offset_index - 1] = Record::Gap {
start: prev_gap_start,
length: prev_gap_length + *file_length
};
}
}
// update the files hashmap
drive.files.insert(file_number, new_file);
break;
}
// if the gap is neither bigger than the file nor equal to the file it won't fit!
},
}
}
}
}
}
return drive;
}
fn checksum_enum(drive: &Drive) -> i64 {
let mut sum: i64 = 0;
for record in drive.in_order.iter() {
match record {
Record::Gap { start: _, length: _ } => (),
Record::File { start, length, file } => {
for i in 0..*length {
sum += (file * (i + start)) as i64;
}
}
}
}
return sum;
}
fn find_order_index(drive: &Drive, file_number: i32) -> usize {
for i in 0..drive.in_order.len() {
if let Record::File { file, .. } = drive.in_order[i] {
if file == file_number {
return i;
}
}
}
return usize::MAX;
}
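
A quick way to sanity-check both passes is the tiny disk map "12345" (one block of file 0, two free, three blocks of file 1, four free, five blocks of file 2). The tests below are an illustrative sketch that could be appended to this file; the map and the expected checksums are worked out by hand, not taken from the commit.

// Sketch: hand-checked examples for both compaction passes on the map "12345".
#[test]
fn compress_small_example() {
    // Block layout 0..111....22222 compacts to 022111222......;
    // checksum is 2*1 + 2*2 + 1*3 + 1*4 + 1*5 + 2*6 + 2*7 + 2*8 = 60.
    let mut drive = read_drive("12345");
    compress(&mut drive);
    assert_eq!(checksum(&drive), 60);
}

#[test]
fn compress_enums_small_example() {
    // No whole file fits into a gap to its left, so the layout is unchanged:
    // 0*0 + 1*(3+4+5) + 2*(10+11+12+13+14) = 132.
    let drive = compress_enums(read_drive_enum("12345"));
    assert_eq!(checksum_enum(&drive), 132);
}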

day10/Cargo.toml Normal file

@@ -0,0 +1,6 @@
[package]
name = "day10"
version = "0.1.0"
edition = "2021"
[dependencies]

day10/src/main.rs Normal file

@@ -0,0 +1,124 @@
use std::collections::HashSet;
use std::collections::HashMap;
use std::fs;
use std::env;
use crate::Dir::{North, South, East, West };
// Dir and step are shared grid helpers: step(start, d, n) moves n squares from start in direction d.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Dir { North, South, East, West }
fn step(start: &Coord, d: Dir, steps: i32) -> Coord {
match d {
North => (start.0, start.1 - steps),
South => (start.0, start.1 + steps),
East => (start.0 + steps, start.1),
West => (start.0 - steps, start.1),
}
}
type Coord = (i32, i32);
fn main() {
println!("Hello, AoC day 04!");
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("wrong number of arguments!");
std::process::exit(1);
}
let file_path = &args[1];
let contents = fs::read_to_string(file_path).expect("Should have been able to read the file");
let mut grid: HashMap<Coord, i8> = HashMap::new();
let mut trailheads: HashSet<Coord> = HashSet::new();
let mut x;
// build our grid!
let mut y = 0;
for line in contents.lines() {
x = 0;
for c in line.chars() {
let num = c.to_string().parse::<i8>().unwrap();
let coords = (x,y);
if num == 0 {
trailheads.insert(coords);
}
grid.insert(coords, num);
x += 1;
}
y += 1;
}
let mut sum: i32 = 0;
for head in &trailheads {
sum += score(*head, &grid);
}
println!("there are {sum} points in score total");
let mut sum: i32 = 0;
for head in &trailheads {
sum += rating(*head, &grid);
}
println!("there are {sum} points in rating total")
}
fn score(head: Coord, grid: &HashMap<Coord, i8>) -> i32 {
let mut visited: HashSet<Coord> = HashSet::new();
let mut front: HashSet<Coord> = HashSet::from([head]);
let mut peaks: HashSet<Coord> = HashSet::new();
while !front.is_empty() {
let mut new_front: HashSet<Coord> = HashSet::new();
for x in front.drain() {
if grid[&x] == 9 {
peaks.insert(x);
}
visited.insert(x);
for x2 in neighbors(&x, &grid) {
if visited.contains(&x2) { continue; }
new_front.insert(x2);
}
}
front = new_front;
}
return peaks.len() as i32;
}
fn rating(head: Coord, grid: &HashMap<Coord, i8>) -> i32 {
let mut visited: HashSet<Coord> = HashSet::new();
let mut front: Vec<Coord> = vec![head];
let mut points = 0;
while !front.is_empty() {
let mut new_front: Vec<Coord> = vec![];
for x in front.drain(..) {
if grid[&x] == 9 {
points += 1;
}
visited.insert(x);
let x_neighbors = neighbors(&x, &grid);
for x2 in x_neighbors {
if visited.contains(&x2) { continue; }
new_front.push(x2);
}
}
front = new_front;
}
return points as i32;
}
fn neighbors(head: &Coord, grid: &HashMap<Coord, i8>) -> HashSet<Coord> {
let height = grid[head];
let neighbors = vec![North, South, East, West]
.into_iter()
.map(|x| step(head, x, 1))
.filter(|x| grid.contains_key(&x) && grid[&x]==height+1);
return neighbors.collect();
}
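
The only real difference between the two searches is the container holding the wavefront: score uses a HashSet, so each reachable cell is counted once and the result is the number of distinct 9s, while rating uses a Vec and keeps duplicates, so each 9 is counted once per distinct trail that reaches it (the visited set never filters anything, because heights strictly increase along a trail). The test below is an illustrative sketch with a hand-built grid where the two measures differ; the grid and the test name are made up for illustration.

// Sketch: a grid where score and rating disagree. Rows:
//   0123456789
//   1234567898
// From the trailhead at (0,0) two different 9s are reachable (score 2), but
// there are ten trails: one along the top row, plus nine that drop to the
// bottom row at columns 0 through 8 (rating 10).
#[test]
fn score_and_rating_differ() {
    let rows = ["0123456789", "1234567898"];
    let mut grid: HashMap<Coord, i8> = HashMap::new();
    for (y, row) in rows.iter().enumerate() {
        for (x, c) in row.chars().enumerate() {
            grid.insert((x as i32, y as i32), c.to_digit(10).unwrap() as i8);
        }
    }
    assert_eq!(score((0, 0), &grid), 2);
    assert_eq!(rating((0, 0), &grid), 10);
}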

day11/Cargo.toml Normal file

@@ -0,0 +1,7 @@
[package]
name = "day11"
version = "0.1.0"
edition = "2021"
[dependencies]
memoize = "0.4.2"

day11/src/main.rs Normal file

@@ -0,0 +1,69 @@
use std::iter;
use std::env;
use std::fs;
use memoize::memoize;
fn main() {
println!("Hello, AoC day 09!");
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("wrong number of arguments!");
std::process::exit(1);
}
let file_path = &args[1];
let initial_stones = fs::read_to_string(file_path)
.expect("should be able to read the file")
.trim()
.to_string();
let mut stones = initial_stones
.split(" ")
.map(|s| s.to_string())
.collect::<Vec<String>>();
for _i in 0..25 {
stones = stones.into_iter().flat_map(blink).collect();
}
println!("there were {} stones after 25 blinks", stones.len());
let limit = 75;
let mut count = 0;
for s in initial_stones.split(" ") {
count += blink_recursive(s.parse().unwrap(), limit);
}
println!("there were {count} stones after {limit} blinks!");
}
fn blink(number: String) -> Box<dyn Iterator<Item=String>>{
if number == "0" {
return Box::new(iter::once("1".to_string()));
}
let l = number.len();
if l % 2 == 0 {
return Box::new(iter::once(number[..l/2].parse::<u128>().unwrap().to_string())
.chain(iter::once(number[l/2..].parse::<u128>().unwrap().to_string())));
}
let number = number.parse::<u128>().unwrap();
let new_string = (number*2024).to_string();
return Box::new(iter::once(new_string));
}
#[memoize]
fn blink_recursive(label: i128, blinks: i32) -> i128 {
if blinks == 0 {
return 1;
} else if label == 0 {
return blink_recursive(1, blinks - 1);
}
let s = label.to_string();
let len = s.len();
if len % 2 == 0 {
let first: i128 = s[..len/2].parse().unwrap();
let second: i128 = s[len/2..].parse().unwrap();
return blink_recursive(first, blinks - 1) + blink_recursive(second, blinks - 1);
}
return blink_recursive(2024*label, blinks - 1);
}
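
The memoized recursion above keeps part two fast by caching (label, blinks) pairs. An alternative that some solutions use (not what this commit does) is to track how many stones carry each label and advance the whole multiset one blink at a time. A minimal sketch follows; the helper name blink_counts is made up for illustration.

use std::collections::HashMap;

// Sketch: advance a multiset of stones by one blink, keyed by label.
fn blink_counts(stones: &HashMap<i128, i128>) -> HashMap<i128, i128> {
    let mut next: HashMap<i128, i128> = HashMap::new();
    for (&label, &count) in stones {
        if label == 0 {
            *next.entry(1).or_insert(0) += count;
        } else {
            let s = label.to_string();
            if s.len() % 2 == 0 {
                // even number of digits: split into two stones
                let (a, b) = s.split_at(s.len() / 2);
                *next.entry(a.parse().unwrap()).or_insert(0) += count;
                *next.entry(b.parse().unwrap()).or_insert(0) += count;
            } else {
                *next.entry(label * 2024).or_insert(0) += count;
            }
        }
    }
    next
}

Folding this 75 times over the parsed input and summing the counts should give the same total as summing blink_recursive over the initial stones.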

day12/Cargo.toml Normal file

@@ -0,0 +1,6 @@
[package]
name = "day12"
version = "0.1.0"
edition = "2021"
[dependencies]

day12/src/main.rs Normal file

@@ -0,0 +1,252 @@
use std::collections::HashSet;
use std::collections::HashMap;
use std::fs;
use std::env;
use crate::Dir::{North, South, East, West };
// Dir and step are shared grid helpers: step(start, d, n) moves n squares from start in direction d.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Dir { North, South, East, West }
fn step(start: &Coord, d: Dir, steps: i32) -> Coord {
match d {
North => (start.0, start.1 - steps),
South => (start.0, start.1 + steps),
East => (start.0 + steps, start.1),
West => (start.0 - steps, start.1),
}
}
type Coord = (i32, i32);
fn main() {
println!("Hello, AoC day 04!");
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("wrong number of arguments!");
std::process::exit(1);
}
let file_path = &args[1];
let contents = fs::read_to_string(file_path).expect("Should have been able to read the file");
let mut grid: HashMap<Coord, char> = HashMap::new();
let mut x;
// build our grid!
let mut y = 0;
for line in contents.lines() {
x = 0;
for c in line.chars() {
let coords = (x,y);
grid.insert(coords, c);
x += 1;
}
y += 1;
}
// part a
let mut sum = 0;
let mut cache: Vec<HashSet<Coord>>= Vec::new();
for (location, _color) in &grid {
cache = fill_region_and_cache(location, &grid, cache);
}
println!("we found {} regions", cache.len());
for region in &cache {
println!("a region with color {} was found to have {} squares",
grid.get(region.iter().nth(0).unwrap()).unwrap(),
region.len());
}
for region in &cache {
let mut total_perimeter = 0;
let mut area = 0;
for location in region {
total_perimeter += perimeter(&location, &grid);
area += 1;
}
sum += area * total_perimeter;
}
println!("the total cost is {sum}");
// part b
// a list of sides (a side is a set of sidesegments)
let mut sides_cache: Vec<HashSet<SideSegment>> = Vec::new();
// for every location in the grid,
for (location, _color) in &grid {
// for every side segment adjacent to that location, update the side cache with the side containing it.
for sideseg in sides(location, &grid) {
sides_cache = spread_over_side_and_cache(
&sideseg,
&grid,
sides_cache);
}
}
sum = 0;
for region in &cache {
let mut sides = 0;
let mut area = 0;
for side in &sides_cache {
let first_location = side.iter().nth(0).unwrap().1;
if region.contains(&first_location) {
sides += 1;
}
}
area += region.len() as i32;
let color = grid.get(region.iter().nth(0).unwrap()).unwrap();
println!("a region with label {color} has area {area} and {sides} sides");
sum += area * sides;
}
println!("the total cost is {sum}");
}
fn neighbors(head: &Coord) -> Vec<Coord> {
return vec![North, South, East, West]
.into_iter()
.map(|x| step(head, x, 1))
.collect();
}
fn same_neighbors(location: &Coord, grid: &HashMap<Coord, char>) -> Vec<Coord> {
if let Some(me) = grid.get(location) {
return vec![North, South, East, West]
.into_iter()
.map(|x| step(location, x, 1))
.filter(|x| grid.contains_key(x))
.filter(|x| me == grid.get(x).expect("tried to look for a missing grid square"))
.collect();
}
return vec![];
}
fn fill_region_and_cache(
location: &Coord,
grid: &HashMap<Coord, char>,
mut cache: Vec<HashSet<Coord>>
) -> Vec<HashSet<Coord>> {
let mut new_region: HashSet<Coord> = HashSet::from([*location]);
let mut accumulator: HashSet<Coord> = HashSet::new();
while !new_region.is_empty() {
let mut blah: HashSet<Coord> = HashSet::new();
for x in &new_region {
for existing_region in &cache {
if existing_region.contains(x) {
return cache;
}
}
}
for x in &new_region {
for n in same_neighbors(&x, grid) {
if !accumulator.contains(&n) {
blah.insert(n);
}
}
}
for x in new_region.drain() {
accumulator.insert(x);
}
new_region = blah;
}
cache.push(accumulator);
return cache;
}
fn spread_over_side_and_cache(
side: &SideSegment,
grid: &HashMap<Coord, char>,
mut cache: Vec<HashSet<SideSegment>>
) -> Vec<HashSet<SideSegment>> {
let mut wavefront: HashSet<SideSegment> = HashSet::from([*side]);
let mut accumulator: HashSet<SideSegment> = HashSet::new();
while !wavefront.is_empty() {
// we're iterating over a wavefront of side segments, checking whether each one
// belongs to the side we're building.
let mut new_wavefront: HashSet<SideSegment> = HashSet::new();
// look at each side in the wavefront
for x in &wavefront {
// if it's in a side that we already have cached, then we're done.
for existing_side in &cache {
if existing_side.contains(x) {
// return the cache unchanged.
return cache;
}
}
}
// look at each side segment in the wavefront, to build up a new wavefront.
for SideSegment(dir, coord) in &wavefront {
for n in same_neighbors(&coord, grid)
// look at neighbors in this region.
.into_iter()
.flat_map(|next_coord| sides(&next_coord, grid))
// look at sides of those neighbors.
.filter(|SideSegment(dir2, _coord2)| dir2 == dir) {
// filter down to just sides in the same direction.
// n is a neighboring side with the same direction as us.
if !accumulator.contains(&n) {
// if n isn't already in the accumulator, add n to the new wavefront.
new_wavefront.insert(n);
}
}
}
// add all the side segments in the wavefront to the accumulator.
for x in wavefront.drain() {
accumulator.insert(x);
}
wavefront = new_wavefront;
}
cache.push(accumulator);
return cache;
}
fn perimeter(location: &Coord, grid: &HashMap<Coord, char>) -> i32 {
let mut perimeter = 0;
for neighbor in neighbors(location) {
let neighboring_region = grid.get(&neighbor);
if neighboring_region != grid.get(location) {
perimeter += 1;
}
}
return perimeter;
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SideSegment (Dir, Coord);
fn sides(location: &Coord, grid: &HashMap<Coord, char>) -> Vec<SideSegment> {
return vec![North, South, East, West]
.into_iter()
.map(|x| SideSegment(x, *location) )
.filter(|SideSegment(dir, loc)| grid.get(loc) != grid.get(&step(loc, *dir, 1)) )
.collect();
}
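
A SideSegment is one cell-length piece of fence, and spread_over_side_and_cache glues segments with the same facing into whole sides; because same_neighbors only follows same-colored cells, a North-facing segment can only ever spread sideways, never up or down. As a quick sanity check, an isolated cell should have perimeter 4 and four one-segment sides. The test below is an illustrative sketch, not part of the commit.

// Sketch: a lone 'A' at (0,0) touches no same-colored neighbors, so every
// direction contributes both a unit of perimeter and its own SideSegment.
#[test]
fn isolated_cell_has_four_sides() {
    let grid: HashMap<Coord, char> = HashMap::from([((0, 0), 'A')]);
    assert_eq!(perimeter(&(0, 0), &grid), 4);
    assert_eq!(sides(&(0, 0), &grid).len(), 4);
}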