use new font.GroupCache, remove unused font structs

Mitchell Hashimoto
2022-08-29 11:34:21 -07:00
parent a0aa100815
commit bd9c048c02
6 changed files with 85 additions and 386 deletions
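In short: Grid drops the font.FallbackSet/font.Family pair (each family owned its own atlas and cached glyphs by style and codepoint) in favor of a single font.GroupCache, which owns the font.Group plus one greyscale and one color atlas, and splits lookup into two cached steps: resolve a codepoint to a face, then render by glyph index. A minimal sketch of the new lookup path, assembled from calls that appear in the hunks below (cp is a stand-in u32 codepoint; the .? unwraps mirror the diff and assume the codepoint resolves):

// Sketch only, using the GroupCache API exactly as this commit exercises it.
const index = (try font_group.indexForCodepoint(alloc, .regular, cp)).?;
const face = font_group.group.faceFromIndex(index);
const glyph_index = face.glyphIndex(cp).?;
const glyph = try font_group.renderGlyph(alloc, index, glyph_index);

Rendered glyphs land in font_group.atlas_greyscale or font_group.atlas_color, which is why Grid's texture code below reads the atlases off the group instead of tracking its own.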

Grid.zig

@@ -45,7 +45,7 @@ texture_color: gl.Texture,
/// The font atlas.
font_lib: font.Library,
font_set: font.FallbackSet,
font_group: font.GroupCache,
/// Whether the cursor is visible or not. This is used to control cursor
/// blinking.
@@ -138,49 +138,47 @@ pub fn init(
alloc: Allocator,
font_size: font.Face.DesiredSize,
) !Grid {
// Initialize our font atlas. We will initially populate the
// font atlas with all the visible ASCII characters since they are common.
var atlas = try Atlas.init(alloc, 512, .greyscale);
errdefer atlas.deinit(alloc);
// Load our emoji font
var atlas_color = try Atlas.init(alloc, 512, .rgba);
errdefer atlas_color.deinit(alloc);
// Build our fallback set so we can look up all codepoints
var font_set: font.FallbackSet = .{};
try font_set.families.ensureTotalCapacity(alloc, 2);
errdefer font_set.deinit(alloc);
// Build our font group
var font_lib = try font.Library.init();
errdefer font_lib.deinit();
var font_group = try font.GroupCache.init(alloc, group: {
var group = try font.Group.init(alloc);
errdefer group.deinit(alloc);
// Regular text
font_set.families.appendAssumeCapacity(fam: {
var fam = font.Family.init(font_lib, atlas);
errdefer fam.deinit(alloc);
try fam.loadFaceFromMemory(.regular, face_ttf, font_size);
try fam.loadFaceFromMemory(.bold, face_bold_ttf, font_size);
break :fam fam;
});
// Our regular font
try group.addFace(
alloc,
.regular,
try font.Face.init(font_lib, face_ttf, font_size),
);
try group.addFace(
alloc,
.bold,
try font.Face.init(font_lib, face_bold_ttf, font_size),
);
// Emoji
font_set.families.appendAssumeCapacity(fam: {
var fam_emoji = font.Family.init(font_lib, atlas_color);
errdefer fam_emoji.deinit(alloc);
try fam_emoji.loadFaceFromMemory(.regular, face_emoji_ttf, font_size);
break :fam fam_emoji;
});
// Emoji
try group.addFace(
alloc,
.regular,
try font.Face.init(font_lib, face_emoji_ttf, font_size),
);
break :group group;
});
// Load all visible ASCII characters and build our cell width based on
// the widest character that we see.
const cell_width: f32 = cell_width: {
var cell_width: f32 = 0;
var i: u8 = 32;
var i: u32 = 32;
while (i <= 126) : (i += 1) {
const goa = try font_set.getOrAddGlyph(alloc, i, .regular);
if (goa.glyph.advance_x > cell_width) {
cell_width = @ceil(goa.glyph.advance_x);
const index = (try font_group.indexForCodepoint(alloc, .regular, i)).?;
const face = font_group.group.faceFromIndex(index);
const glyph_index = face.glyphIndex(i).?;
const glyph = try font_group.renderGlyph(alloc, index, glyph_index);
if (glyph.advance_x > cell_width) {
cell_width = @ceil(glyph.advance_x);
}
}
@@ -190,14 +188,17 @@ pub fn init(
// The cell height is the vertical height required to render underscore
// '_' which should live at the bottom of a cell.
const cell_height: f32 = cell_height: {
const fam = &font_set.families.items[0];
// Get the '_' char for height
const index = (try font_group.indexForCodepoint(alloc, .regular, '_')).?;
const face = font_group.group.faceFromIndex(index);
const glyph_index = face.glyphIndex('_').?;
const glyph = try font_group.renderGlyph(alloc, index, glyph_index);
// This is the height reported by the font face
const face_height: i32 = fam.regular.?.unitsToPxY(fam.regular.?.face.handle.*.height);
const face_height: i32 = face.unitsToPxY(face.face.handle.*.height);
// Determine the height of the underscore char
const glyph = font_set.families.items[0].getGlyph('_', .regular).?;
var res: i32 = fam.regular.?.unitsToPxY(fam.regular.?.face.handle.*.ascender);
var res: i32 = face.unitsToPxY(face.face.handle.*.ascender);
res -= glyph.offset_y;
res += @intCast(i32, glyph.height);
@@ -208,10 +209,10 @@ pub fn init(
break :cell_height @intToFloat(f32, res);
};
const cell_baseline = cell_baseline: {
const fam = &font_set.families.items[0];
const face = font_group.group.faces.get(.regular).items[0];
break :cell_baseline cell_height - @intToFloat(
f32,
fam.regular.?.unitsToPxY(fam.regular.?.face.handle.*.ascender),
face.unitsToPxY(face.face.handle.*.ascender),
);
};
log.debug("cell dimensions w={d} h={d} baseline={d}", .{ cell_width, cell_height, cell_baseline });
@@ -294,12 +295,12 @@ pub fn init(
try texbind.image2D(
0,
.Red,
@intCast(c_int, atlas.size),
@intCast(c_int, atlas.size),
@intCast(c_int, font_group.atlas_greyscale.size),
@intCast(c_int, font_group.atlas_greyscale.size),
0,
.Red,
.UnsignedByte,
atlas.data.ptr,
font_group.atlas_greyscale.data.ptr,
);
}
@@ -315,12 +316,12 @@ pub fn init(
try texbind.image2D(
0,
.RGBA,
@intCast(c_int, atlas_color.size),
@intCast(c_int, atlas_color.size),
@intCast(c_int, font_group.atlas_color.size),
@intCast(c_int, font_group.atlas_color.size),
0,
.BGRA,
.UnsignedByte,
atlas_color.data.ptr,
font_group.atlas_color.data.ptr,
);
}
@@ -336,7 +337,7 @@ pub fn init(
.texture = tex,
.texture_color = tex_color,
.font_lib = font_lib,
.font_set = font_set,
.font_group = font_group,
.cursor_visible = true,
.cursor_style = .box,
.background = .{ .r = 0, .g = 0, .b = 0 },
@@ -345,11 +346,7 @@ pub fn init(
}
pub fn deinit(self: *Grid) void {
for (self.font_set.families.items) |*family| {
family.atlas.deinit(self.alloc);
family.deinit(self.alloc);
}
self.font_set.deinit(self.alloc);
self.font_group.deinit(self.alloc);
self.font_lib.deinit();
self.texture.destroy();
@@ -565,10 +562,30 @@ pub fn updateCell(
var mode: GPUCellMode = .fg;
// Get our glyph. Try our normal font atlas first.
const goa = try self.font_set.getOrAddGlyph(self.alloc, cell.char, style);
if (goa.family == 1) mode = .fg_color;
const glyph = goa.glyph;
// Get the glyph that we're going to use. We first try what the cell
// wants, then the Unicode replacement char, then finally a space.
const FontInfo = struct { index: font.Group.FontIndex, ch: u32 };
const font_info: FontInfo = font_info: {
var chars = [_]u32{ @intCast(u32, cell.char), 0xFFFD, ' ' };
for (chars) |char| {
if (try self.font_group.indexForCodepoint(self.alloc, style, char)) |idx| {
break :font_info FontInfo{
.index = idx,
.ch = char,
};
}
}
@panic("all fonts require at least space");
};
// Render
const face = self.font_group.group.faceFromIndex(font_info.index);
const glyph_index = face.glyphIndex(font_info.ch).?;
const glyph = try self.font_group.renderGlyph(self.alloc, font_info.index, glyph_index);
// If we're rendering a color font, we use the color atlas
if (face.hasColor()) mode = .fg_color;
// If the cell is wide, we need to note that in the mode
if (cell.attrs.wide) mode = mode.mask(.wide_mask);
@@ -649,7 +666,7 @@ pub fn setScreenSize(self: *Grid, dim: ScreenSize) !void {
/// Updates the font texture atlas if it is dirty.
fn flushAtlas(self: *Grid) !void {
{
const atlas = &self.font_set.families.items[0].atlas;
const atlas = &self.font_group.atlas_greyscale;
if (atlas.modified) {
atlas.modified = false;
var texbind = try self.texture.bind(.@"2D");
@@ -683,7 +700,7 @@ fn flushAtlas(self: *Grid) !void {
}
{
const atlas = &self.font_set.families.items[1].atlas;
const atlas = &self.font_group.atlas_color;
if (atlas.modified) {
atlas.modified = false;
var texbind = try self.texture_color.bind(.@"2D");

Face.zig

@@ -102,14 +102,8 @@ pub fn hasColor(self: Face) bool {
return self.face.hasColor();
}
/// Load a glyph for this face. The codepoint can be either a u8 or a
/// []const u8, depending on whether you know it is ASCII or it must be
/// UTF-8 decoded.
pub fn loadGlyph(self: Face, alloc: Allocator, atlas: *Atlas, cp: u32) !Glyph {
// We need a UTF32 codepoint for freetype
const glyph_index = self.glyphIndex(cp) orelse return error.GlyphNotFound;
return self.renderGlyph(alloc, atlas, glyph_index);
}
/// Render a glyph using the glyph index. The rendered glyph is stored in the
/// given texture atlas.
pub fn renderGlyph(self: Face, alloc: Allocator, atlas: *Atlas, glyph_index: u32) !Glyph {
// If our glyph has color, we want to render the color
try self.face.loadGlyph(glyph_index, .{
@@ -211,7 +205,7 @@ test {
// Generate all visible ASCII
var i: u8 = 32;
while (i < 127) : (i += 1) {
_ = try font.loadGlyph(alloc, &atlas, i);
_ = try font.renderGlyph(alloc, &atlas, font.glyphIndex(i).?);
}
}
@@ -228,5 +222,5 @@ test "color emoji" {
var font = try init(lib, testFont, .{ .points = 12 });
defer font.deinit();
_ = try font.loadGlyph(alloc, &atlas, '🥸');
_ = try font.renderGlyph(alloc, &atlas, font.glyphIndex('🥸').?);
}
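With loadGlyph removed, index resolution moves to the caller: glyphIndex returns null when the face lacks the codepoint (the cue to try a fallback face), and renderGlyph does only the rendering. A sketch of the replacement pattern, mirroring the updated tests above (the error return is an assumption carried over from the deleted wrapper):

// Before: _ = try face.loadGlyph(alloc, &atlas, cp);
// After: resolve the glyph index first, then render into the atlas.
const glyph_index = face.glyphIndex(cp) orelse return error.GlyphNotFound;
const glyph = try face.renderGlyph(alloc, &atlas, glyph_index);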

FallbackSet.zig

@@ -1,159 +0,0 @@
//! FallbackSet represents a set of families in priority order to load a glyph.
//! This can be used to merge multiple font families together to find a glyph
//! for a codepoint.
const FallbackSet = @This();
const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Atlas = @import("../Atlas.zig");
const Family = @import("main.zig").Family;
const Library = @import("main.zig").Library;
const Glyph = @import("main.zig").Glyph;
const Style = @import("main.zig").Style;
const codepoint = @import("main.zig").codepoint;
const log = std.log.scoped(.font_fallback);
/// The families to look for in order. This should be managed directly
/// by the caller of the set. Deinit will deallocate this.
families: std.ArrayListUnmanaged(Family) = .{},
/// A quick lookup that points directly to the family that loaded a glyph.
glyphs: std.AutoHashMapUnmanaged(GlyphKey, usize) = .{},
const GlyphKey = struct {
style: Style,
codepoint: u32,
};
pub fn deinit(self: *FallbackSet, alloc: Allocator) void {
self.families.deinit(alloc);
self.glyphs.deinit(alloc);
self.* = undefined;
}
pub const GetOrAdd = struct {
/// Index of the family where the glyph was loaded from
family: usize,
/// True if the glyph already existed, false if it was newly loaded
found_existing: bool,
/// The glyph
glyph: *Glyph,
};
pub fn getOrAddGlyph(
self: *FallbackSet,
alloc: Allocator,
v: anytype,
style: Style,
) !GetOrAdd {
assert(self.families.items.len > 0);
// We need a UTF32 codepoint
const utf32 = codepoint(v);
// If we have this already, load it directly
const glyphKey: GlyphKey = .{ .style = style, .codepoint = utf32 };
const gop = try self.glyphs.getOrPut(alloc, glyphKey);
if (gop.found_existing) {
const i = gop.value_ptr.*;
assert(i < self.families.items.len);
return GetOrAdd{
.family = i,
.found_existing = true,
.glyph = self.families.items[i].getGlyph(v, style) orelse unreachable,
};
}
errdefer _ = self.glyphs.remove(glyphKey);
// Go through each family and look for a matching glyph
var fam_i: ?usize = 0;
const glyph = glyph: {
for (self.families.items) |*family, i| {
fam_i = i;
// If this family already has it loaded, return it.
if (family.getGlyph(v, style)) |glyph| break :glyph glyph;
// Try to load it.
if (family.addGlyph(alloc, v, style)) |glyph|
break :glyph glyph
else |err| switch (err) {
// TODO: this probably doesn't belong here and should
// be higher level... but how?
error.AtlasFull => {
try family.atlas.grow(alloc, family.atlas.size * 2);
break :glyph try family.addGlyph(alloc, v, style);
},
error.GlyphNotFound => {},
else => return err,
}
}
// If we are regular, we use a fallback character
log.warn("glyph not found, using fallback. codepoint={x}", .{utf32});
fam_i = null;
break :glyph try self.families.items[0].addGlyph(alloc, ' ', style);
};
// If we found a real value, then cache it.
// TODO: support caching fallbacks too
if (fam_i) |i|
gop.value_ptr.* = i
else
_ = self.glyphs.remove(glyphKey);
return GetOrAdd{
.family = fam_i orelse 0,
.glyph = glyph,
// Technically possible that we found this in a cache...
.found_existing = false,
};
}
test {
const fontRegular = @import("test.zig").fontRegular;
const fontEmoji = @import("test.zig").fontEmoji;
const testing = std.testing;
const alloc = testing.allocator;
var lib = try Library.init();
defer lib.deinit();
var set: FallbackSet = .{};
try set.families.append(alloc, fam: {
var fam = Family.init(lib, try Atlas.init(alloc, 512, .greyscale));
try fam.loadFaceFromMemory(.regular, fontRegular, .{ .points = 48 });
break :fam fam;
});
try set.families.append(alloc, fam: {
var fam = Family.init(lib, try Atlas.init(alloc, 512, .rgba));
try fam.loadFaceFromMemory(.regular, fontEmoji, .{ .points = 48 });
break :fam fam;
});
defer {
for (set.families.items) |*family| {
family.atlas.deinit(alloc);
family.deinit(alloc);
}
set.deinit(alloc);
}
// Generate all visible ASCII
var i: u8 = 32;
while (i < 127) : (i += 1) {
_ = try set.getOrAddGlyph(alloc, i, .regular);
}
// Emoji should work
_ = try set.getOrAddGlyph(alloc, '🥸', .regular);
_ = try set.getOrAddGlyph(alloc, '🥸', .bold);
}
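Where FallbackSet cached which family had rendered a given style and codepoint, GroupCache keys its cache on the resolved face and glyph index (the GroupCache.zig hunk below shows only the glyph field of that key; pairing it with a Group.FontIndex is an assumption based on the indexForCodepoint and renderGlyph calls elsewhere in this commit). Sketched side by side:

// This file's key: one entry per (style, codepoint), pointing at a family.
const OldGlyphKey = struct { style: Style, codepoint: u32 };
// GroupCache's key, as assumed here: one entry per (face, glyph index).
const NewGlyphKey = struct { index: Group.FontIndex, glyph: u32 };

The practical effect is that codepoint-to-face resolution and glyph rendering are cached independently, so codepoints that resolve to the same glyph can share one rendered entry.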

Family.zig

@@ -1,153 +0,0 @@
//! Family represents the multiple styles of a single font: regular, bold,
//! italic, etc. It is able to cache the glyphs into a single atlas.
const Family = @This();
const std = @import("std");
const Allocator = std.mem.Allocator;
const Atlas = @import("../Atlas.zig");
const Face = @import("main.zig").Face;
const Glyph = @import("main.zig").Glyph;
const Style = @import("main.zig").Style;
const testFont = @import("test.zig").fontRegular;
const codepoint = @import("main.zig").codepoint;
const Library = @import("main.zig").Library;
const log = std.log.scoped(.font_family);
/// The texture atlas where all the font glyphs are rendered.
/// This is NOT owned by the Family; deinitialization must
/// be done manually.
atlas: Atlas,
/// The library shared state.
lib: Library,
/// The glyphs that are loaded into the atlas, keyed by codepoint.
glyphs: std.AutoHashMapUnmanaged(GlyphKey, Glyph) = .{},
/// The font faces representing all the styles in this family.
/// These should be set directly or via various loader functions.
regular: ?Face = null,
bold: ?Face = null,
/// This struct is used for the hash key for glyphs.
const GlyphKey = struct {
style: Style,
codepoint: u32,
};
pub fn init(lib: Library, atlas: Atlas) Family {
return .{
.lib = lib,
.atlas = atlas,
};
}
pub fn deinit(self: *Family, alloc: Allocator) void {
self.glyphs.deinit(alloc);
if (self.regular) |*face| face.deinit();
if (self.bold) |*face| face.deinit();
self.* = undefined;
}
/// Loads a font to use from memory.
///
/// This can only be called if a font is not already loaded for the given style.
pub fn loadFaceFromMemory(
self: *Family,
comptime style: Style,
source: [:0]const u8,
size: Face.DesiredSize,
) !void {
var face = try Face.init(self.lib, source, size);
errdefer face.deinit();
@field(self, switch (style) {
.regular => "regular",
.bold => "bold",
.italic => unreachable,
.bold_italic => unreachable,
}) = face;
}
/// Get the glyph for the given codepoint and style. If the glyph hasn't
/// been loaded yet this will return null.
pub fn getGlyph(self: Family, cp: anytype, style: Style) ?*Glyph {
const utf32 = codepoint(cp);
const entry = self.glyphs.getEntry(.{
.style = style,
.codepoint = utf32,
}) orelse return null;
return entry.value_ptr;
}
/// Add a glyph. If the glyph has already been loaded this will return
/// the existing loaded glyph. If a glyph style can't be found, this will
/// fall back to the "regular" style. If a glyph can't be found in the
/// "regular" style, this will fall back to the unknown glyph character.
///
/// The codepoint can be either a u8 or a []const u8, depending on whether
/// you know it is ASCII or it must be UTF-8 decoded.
pub fn addGlyph(self: *Family, alloc: Allocator, v: anytype, style: Style) !*Glyph {
const face = face: {
// Real is the face we SHOULD use for this style.
var real = switch (style) {
.regular => self.regular,
.bold => self.bold,
.italic => unreachable,
.bold_italic => unreachable,
};
// Fall back to regular if it is null
if (real == null) real = self.regular;
// Return our face if we have it.
if (real) |ptr| break :face ptr;
// If we reached this point, we have no font in the style we
// want OR the fallback.
return error.NoFontFallback;
};
// We need a UTF32 codepoint
const utf32 = codepoint(v);
// If we have this glyph loaded already then we're done.
const glyphKey = .{
.style = style,
.codepoint = utf32,
};
const gop = try self.glyphs.getOrPut(alloc, glyphKey);
if (gop.found_existing) return gop.value_ptr;
errdefer _ = self.glyphs.remove(glyphKey);
// Get the glyph and add it to the atlas.
gop.value_ptr.* = try face.loadGlyph(alloc, &self.atlas, utf32);
return gop.value_ptr;
}
test {
const testing = std.testing;
const alloc = testing.allocator;
var lib = try Library.init();
defer lib.deinit();
var fam = init(lib, try Atlas.init(alloc, 512, .greyscale));
defer fam.deinit(alloc);
defer fam.atlas.deinit(alloc);
try fam.loadFaceFromMemory(.regular, testFont, .{ .points = 12 });
// Generate all visible ASCII
var i: u8 = 32;
while (i < 127) : (i += 1) {
_ = try fam.addGlyph(alloc, i, .regular);
}
i = 32;
while (i < 127) : (i += 1) {
try testing.expect(fam.getGlyph(i, .regular) != null);
}
}

GroupCache.zig

@@ -40,6 +40,7 @@ const GlyphKey = struct {
glyph: u32,
};
/// The GroupCache takes ownership of Group and will free it.
pub fn init(alloc: Allocator, group: Group) !GroupCache {
var atlas_greyscale = try Atlas.init(alloc, 512, .greyscale);
errdefer atlas_greyscale.deinit(alloc);
@@ -65,6 +66,7 @@ pub fn deinit(self: *GroupCache, alloc: Allocator) void {
self.glyphs.deinit(alloc);
self.atlas_greyscale.deinit(alloc);
self.atlas_color.deinit(alloc);
self.group.deinit(alloc);
}
/// Reset the cache. This should be called:
@@ -146,13 +148,13 @@ test {
var lib = try Library.init();
defer lib.deinit();
var group = try Group.init(alloc);
defer group.deinit(alloc);
try group.addFace(alloc, .regular, try Face.init(lib, testFont, .{ .points = 12 }));
var cache = try init(alloc, group);
var cache = try init(alloc, try Group.init(alloc));
defer cache.deinit(alloc);
// Setup group
try cache.group.addFace(alloc, .regular, try Face.init(lib, testFont, .{ .points = 12 }));
const group = cache.group;
// Visible ASCII. Do it twice to verify cache.
var i: u32 = 32;
while (i < 127) : (i += 1) {

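The test rewrite above follows from the ownership note added at the top of this file: GroupCache now frees its Group in deinit, so keeping the old defer group.deinit(alloc) alongside cache.deinit(alloc) would double-free. Setup and teardown reduce to this sketch, using only calls shown in the hunk (ttf stands in for the embedded test font):

var cache = try GroupCache.init(alloc, try Group.init(alloc));
defer cache.deinit(alloc); // also frees the owned Group and both atlases
try cache.group.addFace(alloc, .regular, try Face.init(lib, ttf, .{ .points = 12 }));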
main.zig

@@ -1,11 +1,9 @@
const std = @import("std");
pub const Face = @import("Face.zig");
pub const Family = @import("Family.zig");
pub const Group = @import("Group.zig");
pub const GroupCache = @import("GroupCache.zig");
pub const Glyph = @import("Glyph.zig");
pub const FallbackSet = @import("FallbackSet.zig");
pub const Library = @import("Library.zig");
/// The styles that a family can take.