font(coretext): add metrics test case for CT, fix variable font init

Variable font init used to just select the first available predefined
instance, if there was one, which is often not desirable. Using
createFontDescriptorFromData instead of createFontDescriptorsFromData
ensures that the default variation config is selected. In the future we
should probably allow selection of predefined instances, but for now
this is the correct behavior.

I found this bug while adding the metrics calculation test case for
CoreText, which is why the fix is part of the same commit.
Qwerasd
2024-12-11 21:14:21 -05:00
parent bd18452310
commit fb50143cec
3 changed files with 65 additions and 4 deletions
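
The commit message notes that selecting predefined (named) instances may be worth supporting later. Below is a minimal sketch of what that could look like, reusing only the calls visible in the old code path further down; the helper name, the index parameter, and the fixed 12 pt size are illustrative assumptions, and it presumes the same imports (macos) and error (error.FontInitFailure) used by the Face init code in this diff:

    // Hypothetical future helper, not part of this commit: pick a
    // predefined (named) instance by index via the plural descriptor API.
    fn fontForInstance(
        data: *macos.foundation.Data,
        index: usize,
    ) !*macos.text.Font {
        const arr = macos.text.createFontDescriptorsFromData(data) orelse
            return error.FontInitFailure;
        defer arr.release();

        if (index >= arr.getCount()) return error.FontInitFailure;
        const desc = arr.getValueAtIndex(macos.text.FontDescriptor, index);

        // The descriptor is borrowed from the array, so create the font
        // while the array is still alive.
        return try macos.text.Font.createWithFontDescriptor(desc, 12);
    }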


@@ -20,6 +20,7 @@ pub const FontVariationAxisKey = font_descriptor.FontVariationAxisKey;
 pub const FontSymbolicTraits = font_descriptor.FontSymbolicTraits;
 pub const createFontDescriptorsFromURL = font_manager.createFontDescriptorsFromURL;
 pub const createFontDescriptorsFromData = font_manager.createFontDescriptorsFromData;
+pub const createFontDescriptorFromData = font_manager.createFontDescriptorFromData;
 pub const Frame = frame.Frame;
 pub const Framesetter = framesetter.Framesetter;
 pub const Line = line.Line;


@@ -1,6 +1,7 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
 const foundation = @import("../foundation.zig");
+const FontDescriptor = @import("./font_descriptor.zig").FontDescriptor;
 const c = @import("c.zig").c;

 pub fn createFontDescriptorsFromURL(url: *foundation.URL) ?*foundation.Array {
@@ -14,3 +15,9 @@ pub fn createFontDescriptorsFromData(data: *foundation.Data) ?*foundation.Array
         @ptrCast(data),
     )));
 }
+
+pub fn createFontDescriptorFromData(data: *foundation.Data) ?*FontDescriptor {
+    return @ptrFromInt(@intFromPtr(c.CTFontManagerCreateFontDescriptorFromData(
+        @ptrCast(data),
+    )));
+}


@@ -55,12 +55,10 @@ pub const Face = struct {
         const data = try macos.foundation.Data.createWithBytesNoCopy(source);
         defer data.release();
-        const arr = macos.text.createFontDescriptorsFromData(data) orelse
+        const desc = macos.text.createFontDescriptorFromData(data) orelse
             return error.FontInitFailure;
-        defer arr.release();
-        if (arr.getCount() == 0) return error.FontInitFailure;
+        defer desc.release();
-        const desc = arr.getValueAtIndex(macos.text.FontDescriptor, 0);
         const ct_font = try macos.text.Font.createWithFontDescriptor(desc, 12);
         defer ct_font.release();
@@ -924,3 +922,58 @@ test "glyphIndex colored vs text" {
         try testing.expect(face.isColorGlyph(glyph));
     }
 }
+
+test "coretext: metrics" {
+    const testFont = font.embedded.inconsolata;
+    const alloc = std.testing.allocator;
+
+    var atlas = try font.Atlas.init(alloc, 512, .grayscale);
+    defer atlas.deinit(alloc);
+
+    var ct_font = try Face.init(
+        undefined,
+        testFont,
+        .{ .size = .{ .points = 12, .xdpi = 96, .ydpi = 96 } },
+    );
+    defer ct_font.deinit();
+
+    try std.testing.expectEqual(font.face.Metrics{
+        .cell_width = 8,
+        // The cell height is 17 px because the calculation is
+        //
+        // ascender - descender + gap
+        //
+        // which, for inconsolata is
+        //
+        // 859 - -190 + 0
+        //
+        // font units, at 1000 units per em that works out to 1.049 em,
+        // and 1em should be the point size * dpi scale, so 12 * (96/72)
+        // which is 16, and 16 * 1.049 = 16.784, which finally is rounded
+        // to 17.
+        .cell_height = 17,
+        .cell_baseline = 3,
+        .underline_position = 17,
+        .underline_thickness = 1,
+        .strikethrough_position = 10,
+        .strikethrough_thickness = 1,
+        .overline_position = 0,
+        .overline_thickness = 1,
+        .box_thickness = 1,
+    }, ct_font.metrics);
+
+    // Resize should change metrics
+    try ct_font.setSize(.{ .size = .{ .points = 24, .xdpi = 96, .ydpi = 96 } });
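+    // At 24 pt the same calculation gives 24 * (96/72) = 32 px per em,
+    // and 32 * 1.049 = 33.568, which rounds up to a cell height of 34.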
+    try std.testing.expectEqual(font.face.Metrics{
+        .cell_width = 16,
+        .cell_height = 34,
+        .cell_baseline = 6,
+        .underline_position = 34,
+        .underline_thickness = 2,
+        .strikethrough_position = 19,
+        .strikethrough_thickness = 2,
+        .overline_position = 0,
+        .overline_thickness = 2,
+        .box_thickness = 2,
+    }, ct_font.metrics);
+}