2022 day7/rust: cleanup
parent 5d846eb88c
commit a74b1f4bc6
1 changed file with 37 additions and 51 deletions
@@ -1,76 +1,62 @@
 #![warn(clippy::pedantic)]
 
-use std::{
-    collections::HashMap,
-    hash::Hash,
-    io::{stdin, Read},
-    path::{Path, PathBuf},
-};
+use std::{collections::HashMap, io::stdin, path::PathBuf};
 
-use aoc::*;
+use aoc::PathExt;
+
+fn find_dir_to_delete(dirs: &HashMap<PathBuf, usize>) -> usize {
+    const TOTAL_SPACE: usize = 70_000_000;
+    const REQUIRED_SPACE: usize = 30_000_000;
+
+    let unused = TOTAL_SPACE - dirs[&*PathBuf::from("/")];
+    let to_free = REQUIRED_SPACE - unused;
+
+    let mut sizes: Vec<_> = dirs.values().copied().collect();
+    sizes.sort_unstable();
+    match sizes.binary_search(&to_free) {
+        Ok(i) | Err(i) => sizes[i],
+    }
+}
 
 fn main() {
-    let mut data = String::new();
-    stdin().read_to_string(&mut data).unwrap();
-
+
     let mut cwd = PathBuf::new();
     cwd.push("/");
     let mut dirs: HashMap<PathBuf, usize> = HashMap::new();
 
-    for line in data.lines() {
+    for line in stdin().lines() {
+        let line = line.unwrap();
+
         if let Some(command) = line.strip_prefix("$ ") {
             let mut args = command.split_whitespace();
-            let command = args.next().unwrap();
-            match command {
-                "ls" => {
-                    dirs.remove(&cwd);
-                }
-                "cd" => {
-                    let dir = args.next().unwrap();
-                    match dir {
-                        ".." => assert!(cwd.pop()),
-                        path => cwd.push(path),
-                    }
+            if args.next().unwrap() != "cd" {
+                continue;
+            }
+
+            match args.next().unwrap() {
+                ".." => assert!(cwd.pop()),
+                path => {
+                    cwd.push(path);
+                    assert!(!dirs.contains_key(&cwd));
                 }
-                _ => panic!("unknown command {command}"),
             }
         } else {
-            let (size, _name) = line.split_once(' ').unwrap();
+            let size = line.split_whitespace().next().unwrap();
             if size == "dir" {
-                // skip
-            } else {
-                let size: usize = size.parse().unwrap();
-                *dirs.entry(cwd.clone()).or_default() += size;
+                continue;
             }
-        }
-    }
 
-    let mut dirs_recursive: HashMap<&Path, usize> = HashMap::new();
-    for (dir, size) in &dirs {
-        for parent in std::iter::successors(Some(&**dir), |dir| dir.parent()) {
-            *dirs_recursive.entry(parent).or_default() += size;
+            let size: usize = size.parse().unwrap();
+            for dir in cwd.parents() {
+                *dirs.entry(dir.to_path_buf()).or_default() += size;
+            }
         }
     }
 
     println!(
         "{}",
-        dirs_recursive
-            .values()
-            .filter(|&&s| s <= 100_000)
-            .sum::<usize>()
+        dirs.values().filter(|&&s| s <= 100_000).sum::<usize>()
     );
 
-    const TOTAL_SPACE: usize = 70_000_000;
-    const REQUIRED_SPACE: usize = 30_000_000;
-
-    let unused = TOTAL_SPACE - dirs_recursive[&*PathBuf::from("/")];
-    let to_free = REQUIRED_SPACE - unused;
-    let mut sizes: Vec<_> = dirs_recursive.values().copied().collect();
-    sizes.sort_unstable();
-    let size = match sizes.binary_search(&to_free) {
-        Ok(i) => sizes[i],
-        Err(larger) => sizes[larger],
-    };
-
-    println!("{}", size);
+    println!("{}", find_dir_to_delete(&dirs));
 }
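
Note: the cleanup leans on PathExt, a trait from the author's private aoc helper crate whose source is not part of this diff. Judging from the call site cwd.parents() and the std::iter::successors(Some(&**dir), |dir| dir.parent()) loop it replaces, it is presumably a thin extension over std::path::Path. A minimal sketch under that assumption:

use std::path::{Ancestors, Path, PathBuf};

// Hypothetical reconstruction of aoc::PathExt; the real definition lives in
// the author's local crate and may differ.
pub trait PathExt {
    /// Yield the path itself and every ancestor: "/a/b" -> "/a/b", "/a", "/".
    fn parents(&self) -> Ancestors<'_>;
}

impl PathExt for Path {
    fn parents(&self) -> Ancestors<'_> {
        // std's Path::ancestors already walks from the path up to the root,
        // matching the successors-based loop the old code used.
        self.ancestors()
    }
}

fn main() {
    let p = PathBuf::from("/a/b");
    // Visits "/a/b", "/a", "/": three entries in total.
    assert_eq!(p.parents().count(), 3);
}

With such a helper, each file's size is charged to its own directory and every ancestor while parsing, so the separate dirs_recursive accumulation pass becomes unnecessary.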
Loading…
Reference in a new issue
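One non-obvious detail in the new find_dir_to_delete: on a sorted slice, binary_search returns Ok(i) on an exact hit and Err(i) with the insertion point otherwise, which is the index of the first element greater than the probe. Collapsing both arms with Ok(i) | Err(i) => sizes[i] therefore selects the smallest directory size that is at least to_free. A standalone illustration of that std behaviour:

fn main() {
    // Sorted, as `sizes` is after sort_unstable().
    let sizes = [10, 20, 30];
    assert_eq!(sizes.binary_search(&20), Ok(1));  // exact hit: sizes[1] == 20
    assert_eq!(sizes.binary_search(&25), Err(2)); // insertion point: sizes[2] == 30
                                                  // is the smallest element >= 25
}

If no directory were large enough, Err(i) would equal sizes.len() and the index would panic; here the root always qualifies, since deleting everything trivially frees at least to_free, so the lookup stays in bounds.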