Init + day 1 and 2
commit 9152c089c7
10 changed files with 651 additions and 0 deletions

.gitignore (vendored, Normal file, +5 lines)
@@ -0,0 +1,5 @@
*zig-cache
*zig-out

# Input files
src/days/inputs/day*.txt

build.zig (Normal file, +92 lines)
@@ -0,0 +1,92 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const options = b.addOptions();

    const day = b.option(u8, "day", "Which day to run");
    const day_path: ?[]const u8 = blk: {
        if (day) |d| {
            const dp = std.fmt.allocPrint(b.allocator, "days/day{}.zig", .{d}) catch |err| {
                std.log.err("Unable to create path to file day{}: {}", .{ d, err });
                return;
            };
            break :blk dp;
        }
        break :blk null;
    };

    options.addOption(?[]const u8, "day_path", day_path);
    options.addOption(?u8, "day", day);

    const aoc_lib = b.addModule("aoc", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .optimize = optimize,
    });

    const exe = b.addExecutable(.{
        .name = "aoc-2024",
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });
    exe.root_module.addOptions("build-options", options);
    exe.root_module.addImport("aoc", aoc_lib);
    b.installArtifact(exe);

    const install = b.getInstallStep();
    const install_inputs = b.addInstallDirectory(.{
        .source_dir = b.path("inputs"),
        .install_dir = .{ .bin = {} },
        .install_subdir = "inputs",
    });
    install.dependOn(&install_inputs.step);

    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());

    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const exe_unit_tests = b.addTest(.{
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });
    exe_unit_tests.root_module.addImport("aoc", aoc_lib);

    const run_exe_unit_tests = b.addRunArtifact(exe_unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_exe_unit_tests.step);

    if (day) |d| {
        var day_path_test: []u8 = "";
        if (d < 10) {
            day_path_test = std.fmt.allocPrint(b.allocator, "src/days/day0{}.zig", .{d}) catch |err| {
                std.log.err("Unable to create path to file day{}: {}", .{ d, err });
                return;
            };
        } else {
            day_path_test = std.fmt.allocPrint(b.allocator, "src/days/day{}.zig", .{d}) catch |err| {
                std.log.err("Unable to create path to file day{}: {}", .{ d, err });
                return;
            };
        }

        const day_unit_tests = b.addTest(.{
            .optimize = optimize,
            .target = target,
            .root_source_file = b.path(day_path_test),
        });
        day_unit_tests.root_module.addImport("aoc", aoc_lib);
        const run_day_unit_tests = b.addRunArtifact(day_unit_tests);
        test_step.dependOn(&run_day_unit_tests.step);
    }
}
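
Usage note (not part of the commit, assuming a recent Zig toolchain): the `day` option declared with `b.option(u8, "day", ...)` is passed on the command line as `-Dday`, so day 1 would presumably be built, run and tested with:

    zig build -Dday=1 run
    zig build -Dday=1 test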

build.zig.zon (Normal file, +72 lines)
@@ -0,0 +1,72 @@
.{
    // This is the default name used by packages depending on this one. For
    // example, when a user runs `zig fetch --save <url>`, this field is used
    // as the key in the `dependencies` table. Although the user can choose a
    // different name, most users will stick with this provided value.
    //
    // It is redundant to include "zig" in this name because it is already
    // within the Zig package namespace.
    .name = "aoc-2024",

    // This is a [Semantic Version](https://semver.org/).
    // In a future version of Zig it will be used for package deduplication.
    .version = "0.0.0",

    // This field is optional.
    // This is currently advisory only; Zig does not yet do anything
    // with this value.
    //.minimum_zig_version = "0.11.0",

    // This field is optional.
    // Each dependency must either provide a `url` and `hash`, or a `path`.
    // `zig build --fetch` can be used to fetch all dependencies of a package, recursively.
    // Once all dependencies are fetched, `zig build` no longer requires
    // internet connectivity.
    .dependencies = .{
        // See `zig fetch --save <url>` for a command-line interface for adding dependencies.
        //.example = .{
        //    // When updating this field to a new URL, be sure to delete the corresponding
        //    // `hash`, otherwise you are communicating that you expect to find the old hash at
        //    // the new URL.
        //    .url = "https://example.com/foo.tar.gz",
        //
        //    // This is computed from the file contents of the directory of files that is
        //    // obtained after fetching `url` and applying the inclusion rules given by
        //    // `paths`.
        //    //
        //    // This field is the source of truth; packages do not come from a `url`; they
        //    // come from a `hash`. `url` is just one of many possible mirrors for how to
        //    // obtain a package matching this `hash`.
        //    //
        //    // Uses the [multihash](https://multiformats.io/multihash/) format.
        //    .hash = "...",
        //
        //    // When this is provided, the package is found in a directory relative to the
        //    // build root. In this case the package's hash is irrelevant and therefore not
        //    // computed. This field and `url` are mutually exclusive.
        //    .path = "foo",
        //
        //    // When this is set to `true`, a package is declared to be lazily
        //    // fetched. This makes the dependency only get fetched if it is
        //    // actually used.
        //    .lazy = false,
        //},
    },

    // Specifies the set of files and directories that are included in this package.
    // Only files and directories listed here are included in the `hash` that
    // is computed for this package. Only files listed here will remain on disk
    // when using the zig package manager. As a rule of thumb, one should list
    // files required for compilation plus any license(s).
    // Paths are relative to the build root. Use the empty string (`""`) to refer to
    // the build root itself.
    // A directory listed here means that all files within, recursively, are included.
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
        // For example...
        //"LICENSE",
        //"README.md",
    },
}

inputs/.gitkeep (Normal file, +0 lines, empty)

src/days.zig (Normal file, +19 lines)
@@ -0,0 +1,19 @@
const std = @import("std");
const mem = std.mem;

const options = @import("build-options");

pub const days = .{
    @import("days/day01.zig"),
    @import("days/day02.zig"),
};

pub const run = blk: {
    if (options.day) |d| break :blk days[d - 1].run;
    break :blk r;
};

fn r(alloc: mem.Allocator, progress: *std.Progress.Node) !void {
    _ = alloc;
    _ = progress;
}
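
Not part of the commit: `run` above resolves the selected day's entry point at compile time, since `options.day` comes from the build options. A future day would presumably be registered by appending one more import to the `days` tuple, along these lines:

    pub const days = .{
        @import("days/day01.zig"),
        @import("days/day02.zig"),
        @import("days/day03.zig"), // hypothetical, file does not exist yet
    };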

src/days/day00.zig (Normal file, +25 lines)
@@ -0,0 +1,25 @@
const std = @import("std");
const mem = std.mem;
const log = std.log.scoped(.@"aoc-2024-day00");

const aoc = @import("aoc");

pub fn run(alloc: mem.Allocator, progress: *std.Progress.Node) !void {
    const cwd = std.fs.cwd();
    const input_file = try cwd.openFile("inputs/day00.txt", .{});
    defer input_file.close();

    var input_reader = input_file.reader();
    _ = input_reader; // autofix

    _ = alloc; // autofix
    _ = progress; // autofix
}

test "day 00" {
    const testing = std.testing;
    _ = testing; // autofix

    const input = undefined;
    _ = input; // autofix
}

src/days/day01.zig (Normal file, +128 lines)
@@ -0,0 +1,128 @@
const std = @import("std");
const mem = std.mem;
const log = std.log.scoped(.@"aoc-2024-day01");

const aoc = @import("aoc");

pub fn run(alloc: mem.Allocator, progress: *std.Progress.Node) !void {
    const cwd = std.fs.cwd();
    const input_file = try cwd.openFile("inputs/day01.txt", .{});
    defer input_file.close();

    var input_reader = input_file.reader();

    var side_a = std.ArrayList(u64).init(alloc);
    defer side_a.deinit();

    var side_b = std.ArrayList(u64).init(alloc);
    defer side_b.deinit();

    while (true) {
        const line = input_reader.readUntilDelimiterOrEofAlloc(alloc, '\n', 1028) catch break orelse break;
        defer alloc.free(line);
        const a, const b = try parseLine(line);
        try side_a.append(a);
        try side_b.append(b);
    }

    // Sorting
    aoc.Sort.ascending(u64, side_a.items);
    aoc.Sort.ascending(u64, side_b.items);

    // Part 1
    const result = calcResult(side_a.items, side_b.items);
    log.info("Result Part 1: {}", .{result});

    // Part 2
    const score = try calcSimilarityScore(alloc, side_a.items, side_b.items);
    log.info("Result Part 2: {}", .{score});

    _ = progress; // autofix
}

fn parseLine(line: []const u8) !struct { u64, u64 } {
    var iter = mem.tokenizeAny(u8, line, " ");
    const a_str = iter.next() orelse return error.LineEmpty;
    const b_str = iter.next() orelse return error.LineEmpty;
    std.debug.assert(iter.next() == null);

    const a = try std.fmt.parseInt(u64, a_str, 0);
    const b = try std.fmt.parseInt(u64, b_str, 0);

    return .{ a, b };
}

fn calcResult(side_a: []u64, side_b: []u64) u64 {
    var result: u64 = 0;

    for (0..side_a.len) |i| {
        if (side_a[i] >= side_b[i]) {
            result += side_a[i] - side_b[i];
        } else {
            result += side_b[i] - side_a[i];
        }
    }

    return result;
}

fn calcSimilarityScore(alloc: mem.Allocator, side_a: []u64, side_b: []u64) !u64 {
    var map = std.AutoHashMap(u64, u64).init(alloc);
    defer map.deinit();

    for (0..side_a.len) |i| {
        const a = side_a[i];
        if (map.get(a) != null) continue;
        var needle: [1]u64 = undefined;
        needle[0] = a;
        const count_a = mem.count(u64, side_a, &needle);
        const count_b = mem.count(u64, side_b, &needle);
        _ = try map.put(a, a * count_b * count_a);
    }

    var score: u64 = 0;
    var iter = map.valueIterator();
    while (iter.next()) |entry| {
        score += entry.*;
    }

    return score;
}

test "day 01" {
    // const testing = std.testing;
    //
    // const input =
    //     \\3 4
    //     \\4 3
    //     \\2 5
    //     \\1 3
    //     \\3 9
    //     \\3 3
    // ;
    // const expected_result: u64 = 11;
    //
    // var side_a = std.ArrayList(u64).init(testing.allocator);
    // defer side_a.deinit();
    //
    // var side_b = std.ArrayList(u64).init(testing.allocator);
    // defer side_b.deinit();
    //
    // var line_iter = mem.tokenizeScalar(u8, input, ' ');
    //
    // while (line_iter.next()) |line| {
    //     const a, const b = parseLine(line) catch continue;
    //     try side_a.append(a);
    //     try side_b.append(b);
    //     std.debug.print("Hello", .{});
    // }
    //
    // // Sorting
    // aoc.Sort.ascending(u64, side_a.items);
    // aoc.Sort.ascending(u64, side_b.items);
    //
    // // Calc
    // const result = calculateResult(side_a.items, side_b.items);
    //
    // try testing.expectEqual(expected_result, result);
}
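
A sketch (not part of the commit) of how `calcResult` and `calcSimilarityScore` could be exercised directly on the sample lists from the commented-out test; the expected value 11 matches the commented `expected_result`, and 31 is the corresponding part-two similarity score for the same sample:

    test "day 01 sample (sketch)" {
        const testing = std.testing;

        var side_a = [_]u64{ 3, 4, 2, 1, 3, 3 };
        var side_b = [_]u64{ 4, 3, 5, 3, 9, 3 };

        // calcResult expects both sides sorted ascending.
        aoc.Sort.ascending(u64, &side_a);
        aoc.Sort.ascending(u64, &side_b);

        try testing.expectEqual(@as(u64, 11), calcResult(&side_a, &side_b));
        try testing.expectEqual(@as(u64, 31), try calcSimilarityScore(testing.allocator, &side_a, &side_b));
    }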

src/days/day02.zig (Normal file, +226 lines)
@@ -0,0 +1,226 @@
const std = @import("std");
const mem = std.mem;
const log = std.log;

const aoc = @import("aoc");

const StatusLevel = enum {
    Safe,
    Tolerated,
    Unsafe,
};

const Status = struct {
    level: StatusLevel,
    tolerated: u64 = 0,
    inc: bool,
};

pub fn run(alloc: mem.Allocator, progress: *std.Progress.Node) !void {
    const cwd = std.fs.cwd();
    const input_file = try cwd.openFile("inputs/day02.test.txt", .{});
    // const input_file = try cwd.openFile("inputs/day02.txt", .{});
    defer input_file.close();

    var input_reader = input_file.reader();

    var stats1 = std.ArrayList(Status).init(alloc);
    defer stats1.deinit();

    var stats2 = std.ArrayList(Status).init(alloc);
    defer stats2.deinit();

    while (true) {
        const line = input_reader.readUntilDelimiterOrEofAlloc(alloc, '\n', 1028) catch break orelse break;
        defer alloc.free(line);

        const safety1 = parseLine(line) catch continue;
        const safety2 = parseLineWithSafetyLevel(alloc, line) catch continue;
        log.debug("Line: {s} = {}", .{ line, safety2 });
        try stats1.append(safety1);
        try stats2.append(safety2);
    }

    log.info(
        "Result Part 1: {}",
        .{stats1.items.len - count(stats1.items)},
    );
    log.info(
        "Result Part 2: {}",
        .{stats2.items.len - count(stats2.items)},
    );

    _ = progress; // autofix
}

fn parseLine(line: []u8) !Status {
    var tokens = mem.tokenizeScalar(u8, line, ' ');

    var status: u64 = undefined;
    if (tokens.next()) |first| {
        status = try std.fmt.parseInt(u64, first, 0);
    } else return error.WrongFormat;

    var increasing: bool = undefined;
    if (tokens.next()) |second| {
        const s = try std.fmt.parseInt(u64, second, 0);
        const diff = @as(i64, @intCast(s)) - @as(i64, @intCast(status));
        if (diff >= 1 and diff <= 3) {
            increasing = true;
        } else if (diff <= -1 and diff >= -3) {
            increasing = false;
        } else {
            return .{ .level = .Unsafe, .inc = increasing };
        }

        status = s;
    }

    while (tokens.next()) |raw| {
        const s = try std.fmt.parseInt(u64, raw, 0);
        const diff = @as(i64, @intCast(s)) - @as(i64, @intCast(status));

        if ((diff >= 1 and diff <= 3 and increasing) or (diff <= -1 and diff >= -3 and !increasing)) {
            status = s;
        } else {
            return .{ .level = .Unsafe, .inc = increasing };
        }
    }

    return .{ .level = .Safe, .inc = increasing };
}

fn parseLineWithSafetyLevel(alloc: mem.Allocator, line: []u8) !Status {
    var tokens = mem.tokenizeScalar(u8, line, ' ');

    var first: u64 = undefined;
    // var old_status: ?u64 = null;

    var stats = std.ArrayList(u64).init(alloc);
    defer stats.deinit();

    if (tokens.next()) |f| {
        first = try std.fmt.parseInt(u64, f, 0);
        try stats.append(first);
    } else return error.WrongFormat;

    var skipped: u64 = 0;
    var skipped_level: u64 = 0;
    var increasing: ?bool = null;

    while (tokens.next()) |raw| {
        const s = try std.fmt.parseInt(u64, raw, 0);

        if (increasing == null) {
            const diff: i64 = @as(i64, @intCast(s)) - @as(i64, @intCast(first));

            if (diff >= 0) {
                increasing = true;
            } else {
                increasing = false;
            }
        }

        try stats.append(s);

        // if (increasing) |inc| {
        //     if ((!inRange(diff, 1, 3) and inc) or (!inRange(diff, -3, -1) and !inc)) {
        //         if (old_status) |os| {
        //             const odiff = @as(i64, @intCast(s)) - @as(i64, @intCast(os));
        //             if ((inRange(odiff, 1, 3) and inc) or (inRange(odiff, -3, -1) and !inc)) {
        //                 // safety -= blk: {
        //                 //     if (safety == 0) break :blk 0;
        //                 //     break :blk 1;
        //                 // };
        //                 skipped += 1;
        //                 if (skipped == 1) {
        //                     skipped_level = s;
        //                 }
        //             } else {
        //                 return .{ .level = .Unsafe };
        //             }
        //         }
        //     }
        // }

        // old_status = status;
        // status = s;
    }

    if (increasing) |inc| {
        var i: usize = 0;
        var last_skip: usize = 0;

        while (i < stats.items.len) {
            if (i == 0) {
                i += 1;
                continue;
            }

            const level = stats.items[i];

            const last = blk: {
                if (skipped_level == i - 1 and i > 1) {
                    break :blk stats.items[i - 2];
                }
                break :blk stats.items[i - 1];
            };
            const diff: i64 = @as(i64, @intCast(level)) - @as(i64, @intCast(last));

            if ((!inRange(diff, 1, 3) and inc) or (!inRange(diff, -3, -1) and !inc)) {
                if (i < stats.items.len - 1) {
                    // const next = stats.items[i + 1];
                    const llast = stats.items[i - 2];
                    const ndiff: i64 = @as(i64, @intCast(llast)) - @as(i64, @intCast(level));

                    if ((inRange(ndiff, 1, 3) and inc) or (inRange(ndiff, -3, -1) and !inc)) {
                        if (skipped == 0) skipped_level = level;

                        std.log.debug("Skipping: {}", .{level});

                        skipped += 1;
                        i += 2;
                        last_skip = i;
                        continue;
                    } else {
                        std.log.debug("Aborting at level: {}", .{level});
                        return .{ .level = .Unsafe, .inc = increasing.? };
                    }
                }

                std.log.debug("Aborting at level: {}", .{level});
                return .{ .level = .Unsafe, .inc = increasing.? };
            }

            i += 1;
        }
    }

    switch (skipped) {
        0 => return .{ .level = .Safe, .inc = increasing.? },
        1 => return .{ .level = .Tolerated, .tolerated = skipped_level, .inc = increasing.? },
        else => return .{ .level = .Unsafe, .inc = increasing.? },
    }
}

fn inRange(diff: i64, lower: i64, upper: i64) bool {
    return diff >= lower and diff <= upper;
}

fn count(levels: []Status) u64 {
    var c: u64 = 0;

    for (levels) |level| {
        if (level.level != .Unsafe) c += 1;
    }

    return c;
}

test "day 02" {
    const testing = std.testing;
    _ = testing; // autofix

    const input = undefined;
    _ = input; // autofix
}
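
For comparison only (not the commit's approach): `parseLineWithSafetyLevel` tries to tolerate a single skipped level in place; the same part-two check is often written as a brute force that re-tests the report with each level removed once. A self-contained sketch, assuming the levels have already been parsed into a slice of `u64`:

    // Safe without the dampener: strictly monotone with adjacent steps of 1..3.
    fn isMonotoneSafe(levels: []const u64) bool {
        if (levels.len < 2) return true;
        const increasing = levels[1] > levels[0];
        for (levels[0 .. levels.len - 1], levels[1..]) |a, b| {
            const diff = @as(i64, @intCast(b)) - @as(i64, @intCast(a));
            const ok = if (increasing) (diff >= 1 and diff <= 3) else (diff <= -1 and diff >= -3);
            if (!ok) return false;
        }
        return true;
    }

    // Safe with the dampener: safe as-is, or safe after deleting exactly one level.
    fn isSafeWithDampener(alloc: mem.Allocator, levels: []const u64) !bool {
        if (isMonotoneSafe(levels)) return true;
        for (0..levels.len) |skip| {
            var reduced = try std.ArrayList(u64).initCapacity(alloc, levels.len - 1);
            defer reduced.deinit();
            for (levels, 0..) |level, i| {
                if (i != skip) reduced.appendAssumeCapacity(level);
            }
            if (isMonotoneSafe(reduced.items)) return true;
        }
        return false;
    }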

src/main.zig (Normal file, +55 lines)
@@ -0,0 +1,55 @@
const std = @import("std");
const log = std.log.scoped(.@"aoc-2024");

pub const std_options = std.Options{
    .log_level = .debug,
};

const options = @import("build-options");
const days = @import("days.zig");

pub fn main() !void {
    const day = blk: {
        if (options.day) |d| break :blk d;
        @compileError("please specify which day to build with the `-Dday` flag");
    };

    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    defer {
        const deinit_status = gpa.deinit();
        if (deinit_status == .leak) @panic("Memory leak");
    }

    const day_str = try std.fmt.allocPrint(allocator, "Day {} - Advent of Code 2024", .{day});
    defer allocator.free(day_str);

    var progress = std.Progress.start(.{ .root_name = day_str });
    defer progress.end();

    var timer = try std.time.Timer.start();
    try days.run(allocator, &progress);
    const execution_time = timer.lap();
    printTimerResult(execution_time);
}

fn printTimerResult(t: u64) void {
    var rt = t;

    const hours = rt / std.time.ns_per_hour;
    rt = rt - (hours * std.time.ns_per_hour);

    const minutes = rt / std.time.ns_per_min;
    rt = rt - (minutes * std.time.ns_per_min);

    const seconds = rt / std.time.ns_per_s;
    rt = rt - (seconds * std.time.ns_per_s);

    const ms = rt / std.time.ns_per_ms;
    rt = rt - (ms * std.time.ns_per_ms);

    log.info(
        "Executed in: {}h {}m {}s {}ms (raw ms: {}ms)",
        .{ hours, minutes, seconds, ms, t / std.time.ns_per_ms },
    );
}
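
Not part of the commit: a quick check of the nanosecond decomposition performed by `printTimerResult`, assuming the standard `std.time` constants:

    test "nanosecond decomposition sketch" {
        const t: u64 = 1 * std.time.ns_per_hour + 2 * std.time.ns_per_min + 3 * std.time.ns_per_s + 45 * std.time.ns_per_ms;
        var rt = t;
        const hours = rt / std.time.ns_per_hour;
        rt -= hours * std.time.ns_per_hour;
        const minutes = rt / std.time.ns_per_min;
        rt -= minutes * std.time.ns_per_min;
        const seconds = rt / std.time.ns_per_s;
        rt -= seconds * std.time.ns_per_s;
        const ms = rt / std.time.ns_per_ms;
        try std.testing.expectEqual(@as(u64, 1), hours);
        try std.testing.expectEqual(@as(u64, 2), minutes);
        try std.testing.expectEqual(@as(u64, 3), seconds);
        try std.testing.expectEqual(@as(u64, 45), ms);
    }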

src/root.zig (Normal file, +29 lines)
@@ -0,0 +1,29 @@
//! Util library for the 2024 advent of code
const std = @import("std");

/// Sort a slice in place
pub const Sort = struct {
    pub fn Ascending(comptime T: type) type {
        return struct {
            fn sort(_: @TypeOf(.{}), lhs: T, rhs: T) bool {
                return lhs < rhs;
            }
        };
    }

    pub fn ascending(comptime T: type, items: []T) void {
        std.mem.sort(T, items, .{}, Ascending(T).sort);
    }

    pub fn Descending(comptime T: type) type {
        return struct {
            fn sort(_: @TypeOf(.{}), lhs: T, rhs: T) bool {
                return lhs > rhs;
            }
        };
    }

    pub fn descending(comptime T: type, items: []T) void {
        std.mem.sort(T, items, .{}, Descending(T).sort);
    }
};