diff --git a/build.zig b/build.zig
index 2c8e1cd..08f9163 100644
--- a/build.zig
+++ b/build.zig
@@ -1,5 +1,7 @@
 const std = @import("std");
-const version = std.SemanticVersion{ .major = 0, .minor = 1, .patch = 0 };
+const log = std.log.scoped(.zbench_build);
+
+const version = std.SemanticVersion{ .major = 0, .minor = 1, .patch = 2 };
 
 pub fn build(b: *std.Build) void {
     const target = b.standardTargetOptions(.{});
@@ -43,7 +45,7 @@ fn setupTesting(b: *std.Build, target: std.zig.CrossTarget, optimize: std.builti
     const test_step = b.step("test", "Run unit tests");
     test_step.dependOn(&run_unit_tests.step);
 
-    const test_dirs = [_][]const u8{"util", "."};
+    const test_dirs = [_][]const u8{ "util", "." };
     for (test_dirs) |dir| {
         addTestsFromDir(b, test_step, dir, target, optimize);
     }
@@ -51,7 +53,7 @@ fn setupTesting(b: *std.Build, target: std.zig.CrossTarget, optimize: std.builti
 
 fn addTestsFromDir(b: *std.Build, test_step: *std.Build.Step, dir_path: []const u8, target: std.zig.CrossTarget, optimize: std.builtin.OptimizeMode) void {
     const iterableDir = std.fs.cwd().openIterableDir(dir_path, .{}) catch {
-        std.debug.print("Failed to open directory: {any}\n", .{dir_path});
+        log.warn("Failed to open directory: {s}", .{dir_path});
         return;
     };
 
@@ -60,7 +62,7 @@ fn addTestsFromDir(b: *std.Build, test_step: *std.Build.Step, dir_path: []const
         const optionalEntry = it.next() catch |err| {
             //TODO: break if access denied
             //if (err == std.fs.IterableDir.ChmodError) break;
-            std.debug.print("Directory iteration error: {any}\n", .{err});
+            log.warn("Directory iteration error: {any}", .{err});
             continue;
         };
 
diff --git a/zbench.zig b/zbench.zig
index f3186a8..712639f 100644
--- a/zbench.zig
+++ b/zbench.zig
@@ -3,6 +3,8 @@
 //!zig-autodoc-guide: docs/advanced.md
 
 const std = @import("std");
+const log = std.log.scoped(.zbench);
+
 const c = @import("./util/color.zig");
 const format = @import("./util/format.zig");
 
@@ -81,11 +83,6 @@ pub const Benchmark = struct {
         self.total_operations = ops;
     }
 
-    /// Prints a report of total operations performed during the benchmark.
-    pub fn report(self: *Benchmark) void {
-        std.debug.print("Total operations: {}\n", .{self.total_operations});
-    }
-
     pub const Percentiles = struct {
         p75: u64,
         p99: u64,
@@ -125,7 +122,7 @@ pub const Benchmark = struct {
         if (len > 1) {
             lastIndex = len - 1;
         } else {
-            std.debug.print("Cannot calculate percentiles: recorded less than two durations\n", .{});
+            log.debug("Cannot calculate percentiles: recorded less than two durations", .{});
             return Percentiles{ .p75 = 0, .p99 = 0, .p995 = 0 };
         }
         quickSort(self.durations.items, 0, lastIndex - 1);
@@ -141,7 +138,8 @@ pub const Benchmark = struct {
         return Percentiles{ .p75 = p75, .p99 = p99, .p995 = p995 };
     }
 
-    pub fn prettyPrint(self: Benchmark) !void {
+    /// Prints a report of total operations and timing statistics.
+    pub fn report(self: Benchmark) !void {
         const percentiles = self.calculatePercentiles();
 
         var p75_buffer: [128]u8 = undefined;
@@ -168,9 +166,17 @@ pub const Benchmark = struct {
         var min_max_buffer: [128]u8 = undefined;
         const min_max_str = try std.fmt.bufPrint(min_max_buffer[0..], "({s} ... {s})", .{ min_str, max_str });
 
-        std.debug.print("{s:<22} {s:<8} {s:<22} {s:<28} {s:<10} {s:<10} {s:<10}\n", .{ "benchmark", "runs", "time (avg ± σ)", "(min ... max)", "p75", "p99", "p995" });
-        std.debug.print("---------------------------------------------------------------------------------------------------------------\n", .{});
-        std.debug.print("{s:<22} \x1b[90m{d:<8} \x1b[33m{s:<22} \x1b[95m{s:<28} \x1b[90m{s:<10} {s:<10} {s:<10}\x1b[0m\n", .{ self.name, self.total_operations, avg_std_str, min_max_str, p75_str, p99_str, p995_str });
+        const stdout = std.io.getStdOut().writer();
+        try stdout.print(
+            "\n{s:<22} {s:<8} {s:<22} {s:<28} {s:<10} {s:<10} {s:<10}\n",
+            .{ "benchmark", "runs", "time (avg ± σ)", "(min ... max)", "p75", "p99", "p995" },
+        );
+        try stdout.print("---------------------------------------------------------------------------------------------------------------\n", .{});
+        try stdout.print(
+            "{s:<22} \x1b[90m{d:<8} \x1b[33m{s:<22} \x1b[95m{s:<28} \x1b[90m{s:<10} {s:<10} {s:<10}\x1b[0m\n",
+            .{ self.name, self.total_operations, avg_std_str, min_max_str, p75_str, p99_str, p995_str },
+        );
+        try stdout.print("\n", .{});
     }
 
     /// Calculate the average duration
@@ -248,7 +254,7 @@ pub const BenchmarkResults = struct {
     /// Formats and prints the benchmark results in a readable format.
     pub fn prettyPrint(self: BenchmarkResults) !void {
         const stdout = std.io.getStdOut().writer();
-        std.debug.print("--------------------------------------------------------------------------------------\n", .{});
+        try stdout.print("--------------------------------------------------------------------------------------\n", .{});
 
         for (self.results.items) |result| {
             try stdout.print("{s}", .{result.name});
@@ -317,7 +323,6 @@ pub fn run(comptime func: BenchFunc, bench: *Benchmark, benchResult: *BenchmarkR
     });
 
     bench.setTotalOperations(bench.N);
-    bench.report();
-    try bench.prettyPrint();
+    try bench.report();
 }
 