renderer: extract the size structs to a shared file

Mitchell Hashimoto
2022-10-24 09:27:09 -07:00
parent aa3d132947
commit ce384c2356
3 changed files with 105 additions and 71 deletions

src/renderer.zig

@@ -7,6 +7,7 @@
 //! APIs. The renderers in this package assume that the renderer is already
 //! setup (OpenGL has a context, Vulkan has a surface, etc.)
+pub usingnamespace @import("renderer/size.zig");
 pub const OpenGL = @import("renderer/OpenGL.zig");
 pub const Thread = @import("renderer/Thread.zig");
 pub const State = @import("renderer/State.zig");
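
Because the re-export uses pub usingnamespace, every public declaration in size.zig becomes reachable through the renderer namespace, which is what lets the OpenGL changes below write renderer.GridSize instead of a file-local struct. A minimal sketch of a consumer, assuming the package is imported as "renderer.zig" (the surrounding function is illustrative, not part of this commit):

const renderer = @import("renderer.zig");

// Recompute the grid dimensions from the pixel size of the surface and the
// current cell size; all three types are re-exported from the shared size.zig.
fn resizeGrid(
    grid: *renderer.GridSize,
    screen: renderer.ScreenSize,
    cell: renderer.CellSize,
) void {
    grid.update(screen, cell);
}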

src/renderer/OpenGL.zig

@@ -32,10 +32,10 @@ const CellsLRU = lru.AutoHashMap(struct {
 alloc: std.mem.Allocator,
 /// Current dimensions for this grid.
-size: GridSize,
+size: renderer.GridSize,
 /// Current cell dimensions for this grid.
-cell_size: CellSize,
+cell_size: renderer.CellSize,
 /// The current set of cells to render.
 cells: std.ArrayListUnmanaged(GPUCell),
@@ -787,7 +787,7 @@ pub fn updateCell(
 /// Set the screen size for rendering. This will update the projection
 /// used for the shader so that the scaling of the grid is correct.
-pub fn setScreenSize(self: *OpenGL, dim: ScreenSize) !void {
+pub fn setScreenSize(self: *OpenGL, dim: renderer.ScreenSize) !void {
     // Update the projection uniform within our shader
     const bind = try self.program.use();
     defer bind.unbind();
@@ -960,71 +960,3 @@ pub fn draw(self: *OpenGL) !void {
         self.cells.items.len,
     );
 }
-/// The dimensions of a single "cell" in the terminal grid.
-///
-/// The dimensions are dependent on the current loaded set of font glyphs.
-/// We calculate the width based on the widest character and the height based
-/// on the height requirement for an underscore (the "lowest" -- visually --
-/// character).
-///
-/// The units for the width and height are in world space. They have to
-/// be normalized using the screen projection.
-///
-/// TODO(mitchellh): we should recalculate cell dimensions when new glyphs
-/// are loaded.
-const CellSize = struct {
-    width: f32,
-    height: f32,
-};
-/// The dimensions of the screen that the grid is rendered to. This is the
-/// terminal screen, so it is likely a subset of the window size. The dimensions
-/// should be in pixels.
-const ScreenSize = struct {
-    width: u32,
-    height: u32,
-};
-/// The dimensions of the grid itself, in rows/columns units.
-const GridSize = struct {
-    const Unit = u32;
-    columns: Unit = 0,
-    rows: Unit = 0,
-    /// Update the columns/rows for the grid based on the given screen and
-    /// cell size.
-    fn update(self: *GridSize, screen: ScreenSize, cell: CellSize) void {
-        self.columns = @floatToInt(Unit, @intToFloat(f32, screen.width) / cell.width);
-        self.rows = @floatToInt(Unit, @intToFloat(f32, screen.height) / cell.height);
-    }
-};
-test "GridSize update exact" {
-    var grid: GridSize = .{};
-    grid.update(.{
-        .width = 100,
-        .height = 40,
-    }, .{
-        .width = 5,
-        .height = 10,
-    });
-    try testing.expectEqual(@as(GridSize.Unit, 20), grid.columns);
-    try testing.expectEqual(@as(GridSize.Unit, 4), grid.rows);
-}
-test "GridSize update rounding" {
-    var grid: GridSize = .{};
-    grid.update(.{
-        .width = 20,
-        .height = 40,
-    }, .{
-        .width = 6,
-        .height = 15,
-    });
-    try testing.expectEqual(@as(GridSize.Unit, 3), grid.columns);
-    try testing.expectEqual(@as(GridSize.Unit, 2), grid.rows);
-}

src/renderer/size.zig (new file, 101 lines)

@@ -0,0 +1,101 @@
+const std = @import("std");
+const Allocator = std.mem.Allocator;
+const font = @import("../font/main.zig");
+const log = std.log.scoped(.renderer_size);
+/// The dimensions of a single "cell" in the terminal grid.
+///
+/// The dimensions are dependent on the current loaded set of font glyphs.
+/// We calculate the width based on the widest character and the height based
+/// on the height requirement for an underscore (the "lowest" -- visually --
+/// character).
+///
+/// The units for the width and height are in world space. They have to
+/// be normalized for any renderer implementation.
+pub const CellSize = struct {
+    width: f32,
+    height: f32,
+    /// Initialize the cell size information from a font group. This ensures
+    /// that all renderers use the same cell sizing information for the same
+    /// fonts.
+    pub fn init(alloc: Allocator, group: *font.GroupCache) !CellSize {
+        // Get our cell metrics based on a regular font ascii 'M'. Why 'M'?
+        // Doesn't matter, any normal ASCII will do we're just trying to make
+        // sure we use the regular font.
+        const metrics = metrics: {
+            const index = (try group.indexForCodepoint(alloc, 'M', .regular, .text)).?;
+            const face = try group.group.faceFromIndex(index);
+            break :metrics face.metrics;
+        };
+        log.debug("cell dimensions={}", .{metrics});
+        return CellSize{
+            .width = metrics.cell_width,
+            .height = metrics.cell_height,
+        };
+    }
+};
+/// The dimensions of the screen that the grid is rendered to. This is the
+/// terminal screen, so it is likely a subset of the window size. The dimensions
+/// should be in pixels.
+pub const ScreenSize = struct {
+    width: u32,
+    height: u32,
+};
+/// The dimensions of the grid itself, in rows/columns units.
+pub const GridSize = struct {
+    const Unit = u32;
+    columns: Unit = 0,
+    rows: Unit = 0,
+    /// Initialize a grid size based on a screen and cell size.
+    pub fn init(screen: ScreenSize, cell: CellSize) GridSize {
+        var result: GridSize = undefined;
+        result.update(screen, cell);
+        return result;
+    }
+    /// Update the columns/rows for the grid based on the given screen and
+    /// cell size.
+    pub fn update(self: *GridSize, screen: ScreenSize, cell: CellSize) void {
+        self.columns = @floatToInt(Unit, @intToFloat(f32, screen.width) / cell.width);
+        self.rows = @floatToInt(Unit, @intToFloat(f32, screen.height) / cell.height);
+    }
+};
+test "GridSize update exact" {
+    const testing = std.testing;
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 100,
+        .height = 40,
+    }, .{
+        .width = 5,
+        .height = 10,
+    });
+    try testing.expectEqual(@as(GridSize.Unit, 20), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 4), grid.rows);
+}
+test "GridSize update rounding" {
+    const testing = std.testing;
+    var grid: GridSize = .{};
+    grid.update(.{
+        .width = 20,
+        .height = 40,
+    }, .{
+        .width = 6,
+        .height = 15,
+    });
+    try testing.expectEqual(@as(GridSize.Unit, 3), grid.columns);
+    try testing.expectEqual(@as(GridSize.Unit, 2), grid.rows);
+}
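
Taken together, the new module gives every renderer one shared path from font metrics to grid dimensions. A rough usage sketch, assuming an allocator and a loaded font.GroupCache are already available and that the imports resolve as elsewhere in the package (the setupGrid function and its callers are illustrative, not part of this commit):

const std = @import("std");
const renderer = @import("renderer.zig");
const font = @import("font/main.zig");

// Derive the terminal grid from the loaded fonts and the pixel area available
// for the terminal screen. Error handling beyond `try` is elided.
fn setupGrid(
    alloc: std.mem.Allocator,
    group: *font.GroupCache,
    screen: renderer.ScreenSize,
) !renderer.GridSize {
    // Cell dimensions come from the regular font's glyph metrics.
    const cell_size = try renderer.CellSize.init(alloc, group);

    // Columns/rows are the screen size divided by the cell size, truncated,
    // exactly as the "GridSize update rounding" test demonstrates.
    return renderer.GridSize.init(screen, cell_size);
}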