calculate grid size in dedicated struct, tests

Mitchell Hashimoto
2022-04-15 08:16:22 -07:00
parent d5ab024521
commit e672c9d7d5
6 changed files with 101 additions and 50 deletions

View File

@@ -46,5 +46,7 @@ pub fn build(b: *std.build.Builder) !void {
     const test_step = b.step("test", "Run all tests");
     const lib_tests = b.addTest("src/main.zig");
     ftlib.link(lib_tests);
+    lib_tests.addIncludeDir("vendor/glad/include/");
+    lib_tests.addCSourceFile("vendor/glad/src/gl.c", &.{});
     test_step.dependOn(&lib_tests.step);
 }

View File

@@ -9,7 +9,7 @@ layout (location = 1) in vec4 bg_color_in;
 // The background color for this cell in RGBA (0 to 1.0)
 flat out vec4 bg_color;
 
-uniform vec2 cell_dims;
+uniform vec2 cell_size;
 uniform mat4 projection;
 
 vec3 hsv2rgb(vec3 c)
@@ -21,7 +21,7 @@ vec3 hsv2rgb(vec3 c)
 void main() {
     // Top-left cell coordinates converted to world space
-    vec2 cell_pos = cell_dims * grid_coord;
+    vec2 cell_pos = cell_size * grid_coord;
 
     // Turn the cell position into a vertex point depending on the
     // gl_VertexID. Since we use instanced drawing, we have 4 vertices
@@ -31,7 +31,7 @@ void main() {
     vec2 position;
     position.x = (gl_VertexID == 0 || gl_VertexID == 1) ? 1. : 0.;
     position.y = (gl_VertexID == 0 || gl_VertexID == 3) ? 0. : 1.;
-    cell_pos = cell_pos + cell_dims * position;
+    cell_pos = cell_pos + cell_size * position;
 
     gl_Position = projection * vec4(cell_pos, 1.0, 1.0);
     bg_color = vec4(hsv2rgb(bg_color_in.rgb), 1.0);
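
The vertex shader expands each cell into a quad from gl_VertexID alone, with no per-vertex attribute data: vertices 0..3 of each instance map to the four corners of a unit quad, which is then scaled by cell_size. A minimal Zig sketch of that same corner mapping (hypothetical helper, not part of this commit), checkable with zig test:

const std = @import("std");

// Mirrors the shader's gl_VertexID -> corner mapping for one cell so the
// instanced-quad expansion can be verified on the CPU. Hypothetical helper,
// not part of this commit.
fn cornerOffset(vertex_id: u2) [2]f32 {
    const x: f32 = if (vertex_id == 0 or vertex_id == 1) 1 else 0;
    const y: f32 = if (vertex_id == 0 or vertex_id == 3) 0 else 1;
    return .{ x, y };
}

test "corner offsets form a unit quad" {
    try std.testing.expectEqual([2]f32{ 1, 0 }, cornerOffset(0));
    try std.testing.expectEqual([2]f32{ 1, 1 }, cornerOffset(1));
    try std.testing.expectEqual([2]f32{ 0, 1 }, cornerOffset(2));
    try std.testing.expectEqual([2]f32{ 0, 0 }, cornerOffset(3));
}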

View File

@@ -208,7 +208,7 @@ test {
     // Generate all visible ASCII
     var i: u8 = 32;
     while (i < 127) : (i += 1) {
-        try font.addGlyph(alloc, i);
+        _ = try font.addGlyph(alloc, i);
    }
 
    i = 32;

View File

@@ -3,6 +3,7 @@ const Grid = @This();
 const std = @import("std");
 const assert = std.debug.assert;
+const testing = std.testing;
 const Allocator = std.mem.Allocator;
 
 const Atlas = @import("Atlas.zig");
 const FontAtlas = @import("FontAtlas.zig");
@@ -11,38 +12,13 @@ const gb = @import("gb_math.zig");
 const log = std.log.scoped(.grid);
 
-/// The dimensions of a single "cell" in the terminal grid.
-///
-/// The dimensions are dependent on the current loaded set of font glyphs.
-/// We calculate the width based on the widest character and the height based
-/// on the height requirement for an underscore (the "lowest" -- visually --
-/// character).
-///
-/// The units for the width and height are in world space. They have to
-/// be normalized using the screen projection.
-///
-/// TODO(mitchellh): we should recalculate cell dimensions when new glyphs
-/// are loaded.
-const CellDim = struct {
-    width: f32,
-    height: f32,
-};
-
-/// The dimensions of the screen that the grid is rendered to. This is the
-/// terminal screen, so it is likely a subset of the window size. The dimensions
-/// should be in pixels.
-const ScreenDim = struct {
-    width: i32,
-    height: i32,
-};
-
 alloc: std.mem.Allocator,
 
-/// Current cell dimensions for this grid.
-cell_dims: CellDim,
+/// Current dimensions for this grid.
+size: GridSize,
 
-columns: u32 = 0,
-rows: u32 = 0,
+/// Current cell dimensions for this grid.
+cell_size: CellSize,
 
 /// Shader program for cell rendering.
 program: gl.Program,
@@ -95,11 +71,12 @@ pub fn init(alloc: Allocator) !Grid {
     // Set our cell dimensions
     const pbind = try program.use();
     defer pbind.unbind();
-    try program.setUniform("cell_dims", @Vector(2, f32){ cell_width, cell_height });
+    try program.setUniform("cell_size", @Vector(2, f32){ cell_width, cell_height });
 
     return Grid{
         .alloc = alloc,
-        .cell_dims = .{ .width = cell_width, .height = cell_height },
+        .cell_size = .{ .width = cell_width, .height = cell_height },
+        .size = .{ .rows = 0, .columns = 0 },
         .program = program,
     };
 }
@@ -111,7 +88,7 @@ pub fn deinit(self: *Grid) void {
 
 /// Set the screen size for rendering. This will update the projection
 /// used for the shader so that the scaling of the grid is correct.
-pub fn setScreenSize(self: *Grid, dim: ScreenDim) !void {
+pub fn setScreenSize(self: *Grid, dim: ScreenSize) !void {
     // Create a 2D orthographic projection matrix with the full width/height.
     var projection: gb.gbMat4 = undefined;
     gb.gb_mat4_ortho2d(
@@ -122,18 +99,15 @@ pub fn setScreenSize(self: *Grid, dim: ScreenDim) !void {
         0,
     );
 
-    self.columns = @floatToInt(u32, @intToFloat(f32, dim.width) / self.cell_dims.width);
-    self.rows = @floatToInt(u32, @intToFloat(f32, dim.height) / self.cell_dims.width);
-
     // Update the projection uniform within our shader
     const bind = try self.program.use();
    defer bind.unbind();
    try self.program.setUniform("projection", projection);
 
-    log.debug("screen size w={d} h={d} cols={d} rows={d}", .{
-        dim.width, dim.height,
-        self.columns, self.rows,
-    });
+    // Recalculate the rows/columns.
+    self.size.update(dim, self.cell_size);
+
+    log.debug("screen size screen={} grid={}", .{ dim, self.size });
 }
 
 pub fn render(self: Grid) !void {
@@ -157,15 +131,15 @@ pub fn render(self: Grid) !void {
     // Build our data
     var vertices: std.ArrayListUnmanaged([6]f32) = .{};
-    try vertices.ensureUnusedCapacity(self.alloc, self.columns * self.rows);
+    try vertices.ensureUnusedCapacity(self.alloc, self.size.columns * self.size.rows);
     defer vertices.deinit(self.alloc);
 
     var row: u32 = 0;
-    while (row < self.rows) : (row += 1) {
+    while (row < self.size.rows) : (row += 1) {
         var col: u32 = 0;
-        while (col < self.columns) : (col += 1) {
+        while (col < self.size.columns) : (col += 1) {
             const rowf = @intToFloat(f32, row);
             const colf = @intToFloat(f32, col);
-            const hue = ((colf * @intToFloat(f32, self.rows)) + rowf) / @intToFloat(f32, self.columns * self.rows);
+            const hue = ((colf * @intToFloat(f32, self.size.rows)) + rowf) / @intToFloat(f32, self.size.columns * self.size.rows);
             vertices.appendAssumeCapacity([6]f32{
                 colf,
                 rowf,
@@ -197,4 +171,72 @@ pub fn render(self: Grid) !void {
     try gl.VertexArray.unbind();
 }
 
+/// The dimensions of a single "cell" in the terminal grid.
+///
+/// The dimensions are dependent on the current loaded set of font glyphs.
+/// We calculate the width based on the widest character and the height based
+/// on the height requirement for an underscore (the "lowest" -- visually --
+/// character).
+///
+/// The units for the width and height are in world space. They have to
+/// be normalized using the screen projection.
+///
+/// TODO(mitchellh): we should recalculate cell dimensions when new glyphs
+/// are loaded.
+const CellSize = struct {
+    width: f32,
+    height: f32,
+};
+
+/// The dimensions of the screen that the grid is rendered to. This is the
+/// terminal screen, so it is likely a subset of the window size. The dimensions
+/// should be in pixels.
+const ScreenSize = struct {
+    width: u32,
+    height: u32,
+};
+
+/// The dimensions of the grid itself, in rows/columns units.
+const GridSize = struct {
+    const Unit = u32;
+
+    columns: Unit = 0,
+    rows: Unit = 0,
+
+    /// Update the columns/rows for the grid based on the given screen and
+    /// cell size.
+    fn update(self: *GridSize, screen: ScreenSize, cell: CellSize) void {
+        self.columns = @floatToInt(Unit, @intToFloat(f32, screen.width) / cell.width);
+        self.rows = @floatToInt(Unit, @intToFloat(f32, screen.height) / cell.height);
+    }
+};
+
+test "GridSize update exact" {
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 100,
+        .height = 40,
+    }, .{
+        .width = 5,
+        .height = 10,
+    });
+
+    try testing.expectEqual(@as(GridSize.Unit, 20), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 4), grid.rows);
+}
+
+test "GridSize update rounding" {
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 20,
+        .height = 40,
+    }, .{
+        .width = 6,
+        .height = 15,
+    });
+
+    try testing.expectEqual(@as(GridSize.Unit, 3), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 2), grid.rows);
+}
+
 const face_ttf = @embedFile("../fonts/FiraCode-Regular.ttf");
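
The grid dimensions are now derived in one place: columns come from screen width divided by cell width, rows from screen height divided by cell height, truncated toward zero so a partially visible trailing column or row is dropped (as the "rounding" test above shows). A standalone sketch of the same calculation with hypothetical numbers; the real CellSize/ScreenSize/GridSize live in Grid.zig:

const std = @import("std");

// Standalone sketch of the GridSize calculation with hypothetical numbers.
// @floatToInt truncates, so any partial trailing cell is simply not counted.
const CellSize = struct { width: f32, height: f32 };
const ScreenSize = struct { width: u32, height: u32 };

fn gridSize(screen: ScreenSize, cell: CellSize) [2]u32 {
    return .{
        @floatToInt(u32, @intToFloat(f32, screen.width) / cell.width), // columns
        @floatToInt(u32, @intToFloat(f32, screen.height) / cell.height), // rows
    };
}

test "hypothetical 1024x768 screen with 9x17 px cells" {
    const size = gridSize(.{ .width = 1024, .height = 768 }, .{ .width = 9, .height = 17 });
    try std.testing.expectEqual(@as(u32, 113), size[0]); // 1024 / 9 = 113.7... -> 113
    try std.testing.expectEqual(@as(u32, 45), size[1]); // 768 / 17 = 45.1... -> 45
}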

View File

@@ -6,6 +6,7 @@
 const Window = @This();
 
 const std = @import("std");
+const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
 const Grid = @import("Grid.zig");
 const glfw = @import("glfw");
@@ -78,6 +79,7 @@ pub fn create(alloc: Allocator) !*Window {
 pub fn destroy(self: *Window, alloc: Allocator) void {
     self.grid.deinit();
+    self.window.destroy();
     alloc.destroy(self);
 }
@@ -97,12 +99,16 @@ pub fn run(self: Window) !void {
 }
 
 fn sizeCallback(window: glfw.Window, width: i32, height: i32) void {
+    // glfw gives us signed integers, but negative width/height is
+    // non-sensical so we use unsigned throughout, so assert.
+    assert(width >= 0);
+    assert(height >= 0);
+
     // Update our grid so that the projections on render are correct.
+    const win = window.getUserPointer(Window) orelse return;
     win.grid.setScreenSize(.{
-        .width = width,
-        .height = height,
+        .width = @intCast(u32, width),
+        .height = @intCast(u32, height),
     }) catch |err| log.err("error updating grid screen size err={}", .{err});
 
     // Update our viewport for this context to be the entire window
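
The resize callback now asserts that glfw's signed dimensions are non-negative before narrowing them to the unsigned types the grid expects; in safe build modes @intCast would also panic on a negative value. A minimal sketch of that conversion in isolation (hypothetical helper, not part of Window.zig):

const std = @import("std");

// Hypothetical helper isolating the signed -> unsigned conversion the
// resize callback relies on: glfw reports i32 sizes, the grid wants u32,
// and a negative size is treated as a programmer error.
fn toUnsigned(dim: i32) u32 {
    std.debug.assert(dim >= 0);
    return @intCast(u32, dim);
}

test "non-negative dimensions convert losslessly" {
    try std.testing.expectEqual(@as(u32, 0), toUnsigned(0));
    try std.testing.expectEqual(@as(u32, 640), toUnsigned(640));
}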

View File

@@ -21,4 +21,5 @@ pub fn main() !void {
 test {
     _ = @import("Atlas.zig");
     _ = @import("FontAtlas.zig");
+    _ = @import("Grid.zig");
 }