ghostty/src/font/face/coretext.zig

const std = @import("std");
const assert = std.debug.assert;
const macos = @import("macos");
const harfbuzz = @import("harfbuzz");
const font = @import("../main.zig");

pub const Face = struct {
    /// Our font face
    font: *macos.text.Font,

    /// Harfbuzz font corresponding to this face.
    hb_font: harfbuzz.Font,

    /// The presentation for this font.
    presentation: font.Presentation,

    /// Initialize a CoreText-based face from another initialized font face
    /// but with a new size. This is often how CoreText fonts are initialized
    /// because the font is loaded at a default size during discovery, and then
    /// adjusted to the final size for the final load.
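    ///
    /// Example sketch (the `base` face here is assumed to come from
    /// discovery; the point size is arbitrary):
    ///
    ///     var face = try Face.initFontCopy(base, .{ .points = 14 });
    ///     defer face.deinit();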
    pub fn initFontCopy(base: *macos.text.Font, size: font.face.DesiredSize) !Face {
        // Create a copy
        const ct_font = try base.copyWithAttributes(@intToFloat(f32, size.points), null);
        errdefer ct_font.release();

        const hb_font = try harfbuzz.coretext.createFont(ct_font);
        errdefer hb_font.destroy();

        const traits = ct_font.getSymbolicTraits();

        return Face{
            .font = ct_font,
            .hb_font = hb_font,
            .presentation = if (traits.color_glyphs) .emoji else .text,
        };
    }

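    /// Release the CoreText and HarfBuzz resources owned by this face.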
    pub fn deinit(self: *Face) void {
        self.font.release();
        self.hb_font.destroy();
        self.* = undefined;
    }

    /// Returns the glyph index for the given Unicode code point. If this
    /// face doesn't support this glyph, null is returned.
    pub fn glyphIndex(self: Face, cp: u32) ?u32 {
        // Turn UTF-32 into UTF-16 for CT API
        var unichars: [2]u16 = undefined;
        const pair = macos.foundation.stringGetSurrogatePairForLongCharacter(cp, &unichars);
        const len: usize = if (pair) 2 else 1;

        // Get our glyphs
        var glyphs = [2]macos.graphics.Glyph{ 0, 0 };
        if (!self.font.getGlyphsForCharacters(unichars[0..len], glyphs[0..len]))
            return null;

        // We can have pairs due to chars like emoji but we expect all of them
        // to decode down into exactly one glyph ID.
        if (pair) assert(glyphs[1] == 0);

        return @intCast(u32, glyphs[0]);
    }
};
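
// Copies a Monaco face to 18pt and verifies that it reports a text
// presentation and that every visible ASCII code point maps to a glyph.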
test {
    const testing = std.testing;

    const name = try macos.foundation.String.createWithBytes("Monaco", .utf8, false);
    defer name.release();
    const desc = try macos.text.FontDescriptor.createWithNameAndSize(name, 12);
    defer desc.release();
    const ct_font = try macos.text.Font.createWithFontDescriptor(desc, 12);
    defer ct_font.release();

    var face = try Face.initFontCopy(ct_font, .{ .points = 18 });
    defer face.deinit();

    try testing.expectEqual(font.Presentation.text, face.presentation);

    // Generate all visible ASCII
    var i: u8 = 32;
    while (i < 127) : (i += 1) {
        try testing.expect(face.glyphIndex(i) != null);
        //_ = try face.renderGlyph(alloc, &atlas, ft_font.glyphIndex(i).?);
    }
}

test "emoji" {
const testing = std.testing;
const name = try macos.foundation.String.createWithBytes("Apple Color Emoji", .utf8, false);
defer name.release();
const desc = try macos.text.FontDescriptor.createWithNameAndSize(name, 12);
defer desc.release();
const ct_font = try macos.text.Font.createWithFontDescriptor(desc, 12);
defer ct_font.release();
var face = try Face.initFontCopy(ct_font, .{ .points = 18 });
defer face.deinit();
// Presentation
try testing.expectEqual(font.Presentation.emoji, face.presentation);
// Glyph index check
try testing.expect(face.glyphIndex('🥸') != null);
}