From e672c9d7d51c65eeaf0eff2d330383ab7cf8ae1d Mon Sep 17 00:00:00 2001
From: Mitchell Hashimoto
Date: Fri, 15 Apr 2022 08:16:22 -0700
Subject: [PATCH] calculate grid size in dedicated struct, tests

---
 build.zig           |   2 +
 shaders/cell.v.glsl |   6 +--
 src/FontAtlas.zig   |   2 +-
 src/Grid.zig        | 128 +++++++++++++++++++++++++++++---------
 src/Window.zig      |  12 +++--
 src/main.zig        |   1 +
 6 files changed, 101 insertions(+), 50 deletions(-)

diff --git a/build.zig b/build.zig
index ba6a67d17..18ea397f5 100644
--- a/build.zig
+++ b/build.zig
@@ -46,5 +46,7 @@ pub fn build(b: *std.build.Builder) !void {
     const test_step = b.step("test", "Run all tests");
     const lib_tests = b.addTest("src/main.zig");
     ftlib.link(lib_tests);
+    lib_tests.addIncludeDir("vendor/glad/include/");
+    lib_tests.addCSourceFile("vendor/glad/src/gl.c", &.{});
     test_step.dependOn(&lib_tests.step);
 }
diff --git a/shaders/cell.v.glsl b/shaders/cell.v.glsl
index 2696a8dde..aaa03d78e 100644
--- a/shaders/cell.v.glsl
+++ b/shaders/cell.v.glsl
@@ -9,7 +9,7 @@ layout (location = 1) in vec4 bg_color_in;
 // The background color for this cell in RGBA (0 to 1.0)
 flat out vec4 bg_color;
 
-uniform vec2 cell_dims;
+uniform vec2 cell_size;
 uniform mat4 projection;
 
 vec3 hsv2rgb(vec3 c)
@@ -21,7 +21,7 @@ vec3 hsv2rgb(vec3 c)
 
 void main() {
     // Top-left cell coordinates converted to world space
-    vec2 cell_pos = cell_dims * grid_coord;
+    vec2 cell_pos = cell_size * grid_coord;
 
     // Turn the cell position into a vertex point depending on the
     // gl_VertexID. Since we use instanced drawing, we have 4 vertices
@@ -31,7 +31,7 @@ void main() {
     vec2 position;
     position.x = (gl_VertexID == 0 || gl_VertexID == 1) ? 1. : 0.;
     position.y = (gl_VertexID == 0 || gl_VertexID == 3) ? 0. : 1.;
-    cell_pos = cell_pos + cell_dims * position;
+    cell_pos = cell_pos + cell_size * position;
 
     gl_Position = projection * vec4(cell_pos, 1.0, 1.0);
     bg_color = vec4(hsv2rgb(bg_color_in.rgb), 1.0);
diff --git a/src/FontAtlas.zig b/src/FontAtlas.zig
index 58804faff..fb56aabf9 100644
--- a/src/FontAtlas.zig
+++ b/src/FontAtlas.zig
@@ -208,7 +208,7 @@ test {
     // Generate all visible ASCII
     var i: u8 = 32;
     while (i < 127) : (i += 1) {
-        try font.addGlyph(alloc, i);
+        _ = try font.addGlyph(alloc, i);
     }
 
     i = 32;
diff --git a/src/Grid.zig b/src/Grid.zig
index 5441a5110..788bd90f1 100644
--- a/src/Grid.zig
+++ b/src/Grid.zig
@@ -3,6 +3,7 @@ const Grid = @This();
 
 const std = @import("std");
 const assert = std.debug.assert;
+const testing = std.testing;
 const Allocator = std.mem.Allocator;
 const Atlas = @import("Atlas.zig");
 const FontAtlas = @import("FontAtlas.zig");
@@ -11,38 +12,13 @@ const gb = @import("gb_math.zig");
 
 const log = std.log.scoped(.grid);
 
-/// The dimensions of a single "cell" in the terminal grid.
-///
-/// The dimensions are dependent on the current loaded set of font glyphs.
-/// We calculate the width based on the widest character and the height based
-/// on the height requirement for an underscore (the "lowest" -- visually --
-/// character).
-///
-/// The units for the width and height are in world space. They have to
-/// be normalized using the screen projection.
-///
-/// TODO(mitchellh): we should recalculate cell dimensions when new glyphs
-/// are loaded.
-const CellDim = struct {
-    width: f32,
-    height: f32,
-};
-
-/// The dimensions of the screen that the grid is rendered to. This is the
-/// terminal screen, so it is likely a subset of the window size. The dimensions
-/// should be in pixels.
-const ScreenDim = struct {
-    width: i32,
-    height: i32,
-};
-
 alloc: std.mem.Allocator,
 
-/// Current cell dimensions for this grid.
-cell_dims: CellDim,
+/// Current dimensions for this grid.
+size: GridSize,
 
-columns: u32 = 0,
-rows: u32 = 0,
+/// Current cell dimensions for this grid.
+cell_size: CellSize,
 
 /// Shader program for cell rendering.
 program: gl.Program,
@@ -95,11 +71,12 @@ pub fn init(alloc: Allocator) !Grid {
     // Set our cell dimensions
     const pbind = try program.use();
     defer pbind.unbind();
-    try program.setUniform("cell_dims", @Vector(2, f32){ cell_width, cell_height });
+    try program.setUniform("cell_size", @Vector(2, f32){ cell_width, cell_height });
 
     return Grid{
         .alloc = alloc,
-        .cell_dims = .{ .width = cell_width, .height = cell_height },
+        .cell_size = .{ .width = cell_width, .height = cell_height },
+        .size = .{ .rows = 0, .columns = 0 },
         .program = program,
     };
 }
@@ -111,7 +88,7 @@ pub fn deinit(self: *Grid) void {
 
 /// Set the screen size for rendering. This will update the projection
 /// used for the shader so that the scaling of the grid is correct.
-pub fn setScreenSize(self: *Grid, dim: ScreenDim) !void {
+pub fn setScreenSize(self: *Grid, dim: ScreenSize) !void {
     // Create a 2D orthographic projection matrix with the full width/height.
     var projection: gb.gbMat4 = undefined;
     gb.gb_mat4_ortho2d(
@@ -122,18 +99,15 @@ pub fn setScreenSize(self: *Grid, dim: ScreenDim) !void {
         0,
     );
 
-    self.columns = @floatToInt(u32, @intToFloat(f32, dim.width) / self.cell_dims.width);
-    self.rows = @floatToInt(u32, @intToFloat(f32, dim.height) / self.cell_dims.width);
-
     // Update the projection uniform within our shader
     const bind = try self.program.use();
     defer bind.unbind();
     try self.program.setUniform("projection", projection);
 
-    log.debug("screen size w={d} h={d} cols={d} rows={d}", .{
-        dim.width, dim.height,
-        self.columns, self.rows,
-    });
+    // Recalculate the rows/columns.
+    self.size.update(dim, self.cell_size);
+
+    log.debug("screen size screen={} grid={}", .{ dim, self.size });
 }
 
 pub fn render(self: Grid) !void {
@@ -157,15 +131,15 @@ pub fn render(self: Grid) !void {
 
     // Build our data
     var vertices: std.ArrayListUnmanaged([6]f32) = .{};
-    try vertices.ensureUnusedCapacity(self.alloc, self.columns * self.rows);
+    try vertices.ensureUnusedCapacity(self.alloc, self.size.columns * self.size.rows);
     defer vertices.deinit(self.alloc);
     var row: u32 = 0;
-    while (row < self.rows) : (row += 1) {
+    while (row < self.size.rows) : (row += 1) {
         var col: u32 = 0;
-        while (col < self.columns) : (col += 1) {
+        while (col < self.size.columns) : (col += 1) {
             const rowf = @intToFloat(f32, row);
             const colf = @intToFloat(f32, col);
-            const hue = ((colf * @intToFloat(f32, self.rows)) + rowf) / @intToFloat(f32, self.columns * self.rows);
+            const hue = ((colf * @intToFloat(f32, self.size.rows)) + rowf) / @intToFloat(f32, self.size.columns * self.size.rows);
             vertices.appendAssumeCapacity([6]f32{
                 colf,
                 rowf,
@@ -197,4 +171,72 @@ pub fn render(self: Grid) !void {
     try gl.VertexArray.unbind();
 }
 
+/// The dimensions of a single "cell" in the terminal grid.
+///
+/// The dimensions are dependent on the current loaded set of font glyphs.
+/// We calculate the width based on the widest character and the height based
+/// on the height requirement for an underscore (the "lowest" -- visually --
+/// character).
+///
+/// The units for the width and height are in world space. They have to
+/// be normalized using the screen projection.
+///
+/// TODO(mitchellh): we should recalculate cell dimensions when new glyphs
+/// are loaded.
+const CellSize = struct {
+    width: f32,
+    height: f32,
+};
+
+/// The dimensions of the screen that the grid is rendered to. This is the
+/// terminal screen, so it is likely a subset of the window size. The dimensions
+/// should be in pixels.
+const ScreenSize = struct {
+    width: u32,
+    height: u32,
+};
+
+/// The dimensions of the grid itself, in rows/columns units.
+const GridSize = struct {
+    const Unit = u32;
+
+    columns: Unit = 0,
+    rows: Unit = 0,
+
+    /// Update the columns/rows for the grid based on the given screen and
+    /// cell size.
+    fn update(self: *GridSize, screen: ScreenSize, cell: CellSize) void {
+        self.columns = @floatToInt(Unit, @intToFloat(f32, screen.width) / cell.width);
+        self.rows = @floatToInt(Unit, @intToFloat(f32, screen.height) / cell.height);
+    }
+};
+
+test "GridSize update exact" {
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 100,
+        .height = 40,
+    }, .{
+        .width = 5,
+        .height = 10,
+    });
+
+    try testing.expectEqual(@as(GridSize.Unit, 20), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 4), grid.rows);
+}
+
+test "GridSize update rounding" {
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 20,
+        .height = 40,
+    }, .{
+        .width = 6,
+        .height = 15,
+    });
+
+    try testing.expectEqual(@as(GridSize.Unit, 3), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 2), grid.rows);
+}
+
 const face_ttf = @embedFile("../fonts/FiraCode-Regular.ttf");
diff --git a/src/Window.zig b/src/Window.zig
index 9df46511d..8e1f276f1 100644
--- a/src/Window.zig
+++ b/src/Window.zig
@@ -6,6 +6,7 @@ const Window = @This();
 
 const std = @import("std");
+const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
 const Grid = @import("Grid.zig");
 const glfw = @import("glfw");
@@ -78,6 +79,7 @@ pub fn create(alloc: Allocator) !*Window {
 
 pub fn destroy(self: *Window, alloc: Allocator) void {
     self.grid.deinit();
+    self.window.destroy();
     alloc.destroy(self);
 }
 
@@ -97,12 +99,16 @@ pub fn run(self: Window) !void {
 }
 
 fn sizeCallback(window: glfw.Window, width: i32, height: i32) void {
-    const win = window.getUserPointer(Window) orelse return;
+    // glfw gives us signed integers, but negative width/height is
+    // non-sensical, so we use unsigned throughout and assert.
+    assert(width >= 0);
+    assert(height >= 0);
 
     // Update our grid so that the projections on render are correct.
+    const win = window.getUserPointer(Window) orelse return;
     win.grid.setScreenSize(.{
-        .width = width,
-        .height = height,
+        .width = @intCast(u32, width),
+        .height = @intCast(u32, height),
     }) catch |err| log.err("error updating grid screen size err={}", .{err});
 
     // Update our viewport for this context to be the entire window
diff --git a/src/main.zig b/src/main.zig
index 3599615ae..432033cd4 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -21,4 +21,5 @@ pub fn main() !void {
 test {
     _ = @import("Atlas.zig");
     _ = @import("FontAtlas.zig");
+    _ = @import("Grid.zig");
 }
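
A note on the cell-count math that the two new tests pin down: @floatToInt
truncates toward zero, so a partial cell at the right or bottom edge of the
screen is dropped rather than rounded up. Below is a minimal standalone sketch
of the same calculation outside the patch; the struct and parameter names
(Size, screen_w, cell_w, and so on) are hypothetical stand-ins, and it uses
the same pre-0.10 Zig builtins the patch itself uses.

const std = @import("std");

/// Illustrative copy of the GridSize.update calculation: screen dimensions
/// in pixels divided by cell dimensions in pixels, truncated toward zero.
const Size = struct {
    columns: u32 = 0,
    rows: u32 = 0,

    fn update(self: *Size, screen_w: u32, screen_h: u32, cell_w: f32, cell_h: f32) void {
        self.columns = @floatToInt(u32, @intToFloat(f32, screen_w) / cell_w);
        self.rows = @floatToInt(u32, @intToFloat(f32, screen_h) / cell_h);
    }
};

pub fn main() void {
    var size: Size = .{};

    // A 1280x720 screen with 8x16 pixel cells fits exactly 160x45 cells.
    size.update(1280, 720, 8, 16);
    std.debug.print("columns={d} rows={d}\n", .{ size.columns, size.rows });

    // 1285 pixels of width still yields 160 columns: the trailing 5 pixels
    // are less than one 8-pixel cell, so the division truncates them away.
    size.update(1285, 720, 8, 16);
    std.debug.print("columns={d} rows={d}\n", .{ size.columns, size.rows });
}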
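
The vertex shader rename above also sits next to a trick worth spelling out:
with instanced drawing and four vertices per cell, the corners carry no
per-vertex attributes at all; each corner of the unit square is derived purely
from gl_VertexID. A quick sketch of that mapping in plain Zig, just to make
the corner order visible (how the four vertices are assembled into triangles
depends on the draw call, which this patch does not touch):

const std = @import("std");

pub fn main() void {
    // Mirrors cell.v.glsl: position.x is 1 for IDs 0 and 1, otherwise 0;
    // position.y is 0 for IDs 0 and 3, otherwise 1.
    var id: u32 = 0;
    while (id < 4) : (id += 1) {
        const x: f32 = if (id == 0 or id == 1) 1 else 0;
        const y: f32 = if (id == 0 or id == 3) 0 else 1;
        std.debug.print("gl_VertexID={d} -> corner ({d}, {d})\n", .{ id, x, y });
    }
}

IDs 0 through 3 trace the unit square's corners in the order (1,0), (1,1),
(0,1), (0,0); scaling by cell_size and offsetting by the per-instance
grid_coord then places that square over the cell's slot in world space.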