Mirror of https://github.com/ghostty-org/ghostty.git
Cache text shaping results per row
This uses an LRU to cache text shaping results per row. Rows are marked "dirty" and the cached entry is invalidated whenever any cell on the row changes. Rows are identified by their ID in the screen's circular buffer rather than by y-value, so the row cache remains valid even as the screen scrolls.

Benchmarks via tracing:

* Heavy output (`cat <big>.txt`) - ~2% slower, due to the overhead of a cache with a low hit rate
* Mostly static screen with only text - ~10% faster, high cache hit rate
* Mostly static screen with a mix of ligatures, emoji, and text - ~40% faster, high cache hit rate

Terminals are "mostly static". Usually we sit at a prompt that's just blinking, re-rendering history that already exists, and maybe scroll back a bit to look at recent results. Ultimately, `cat <big>.txt` is not that common, so I think trading a 2% slowdown there for a 10-40% speedup on the common case is worth it. These percentages can eventually be thought of as direct FPS improvements (40% more FPS) once I split tty stream processing and rendering into separate threads and remove the FPS limiter that exists today.
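As a rough illustration of the keying strategy (not code from this commit), the sketch below uses std.AutoHashMap as a stand-in for the LRU added in src/lru.zig, and RowId/GPUCell are simplified placeholder types. The point is that entries are keyed by the row's stable ID, so a cached row survives scrolling and only needs its grid_row re-stamped with the row's current y.

const std = @import("std");

// Illustrative sketch only (not part of this commit); RowId and GPUCell are placeholders.
const RowId = u32;
const GPUCell = struct { grid_row: u16, glyph: u32 };

test "shaped cells are cached by stable row id, not by y" {
    const alloc = std.testing.allocator;

    // Stand-in for the per-row cache: row ID -> previously shaped cells.
    var cache = std.AutoHashMap(RowId, []const GPUCell).init(alloc);
    defer cache.deinit();

    // Frame 1: row 42 was shaped while it sat at y = 0.
    const shaped = [_]GPUCell{.{ .grid_row = 0, .glyph = 7 }};
    try cache.put(42, &shaped);

    // Frame 2: the screen scrolled, so the same row is now at y = 3. Its ID
    // did not change, so the cache still hits and shaping is skipped; only
    // the on-screen row needs to be re-stamped.
    if (cache.get(42)) |cells| {
        var cell = cells[0];
        cell.grid_row = 3;
        try std.testing.expectEqual(@as(u16, 3), cell.grid_row);
        try std.testing.expectEqual(@as(u32, 7), cell.glyph);
    } else return error.CacheMiss;
}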
TODO.md (1 line changed)
@@ -12,7 +12,6 @@ Performance:
   screen data structure.
 * Screen cell structure should be rethought to use some data oriented design,
   also bring it closer to GPU cells, perhaps.
-* Cache text shaping results and only invalidate if the line becomes dirty.

 Correctness:

src/Grid.zig (43 lines changed)
@@ -12,9 +12,12 @@ const Terminal = terminal.Terminal;
 const gl = @import("opengl.zig");
 const trace = @import("tracy").trace;
 const math = @import("math.zig");
+const lru = @import("lru.zig");

 const log = std.log.scoped(.grid);

+const CellsLRU = lru.AutoHashMap(terminal.Screen.RowHeader.Id, std.ArrayListUnmanaged(GPUCell));
+
 alloc: std.mem.Allocator,

 /// Current dimensions for this grid.
@@ -26,6 +29,10 @@ cell_size: CellSize,
 /// The current set of cells to render.
 cells: std.ArrayListUnmanaged(GPUCell),

+/// The LRU that stores our GPU cells cached by row IDs. This is used to
+/// prevent high CPU activity when shaping rows.
+cells_lru: CellsLRU,
+
 /// The size of the cells list that was sent to the GPU. This is used
 /// to detect when the cells array was reallocated/resized and handle that
 /// accordingly.
@@ -303,6 +310,7 @@ pub fn init(
     return Grid{
         .alloc = alloc,
         .cells = .{},
+        .cells_lru = CellsLRU.init(0),
         .cell_size = .{ .width = metrics.cell_width, .height = metrics.cell_height },
         .size = .{ .rows = 0, .columns = 0 },
         .program = program,
@@ -333,6 +341,7 @@ pub fn deinit(self: *Grid) void {
     self.ebo.destroy();
     self.vao.destroy();
     self.program.destroy();
+    self.cells_lru.deinit(self.alloc);
     self.cells.deinit(self.alloc);
     self.* = undefined;
 }
@@ -369,6 +378,22 @@ pub fn rebuildCells(self: *Grid, term: *Terminal) !void {
     while (rowIter.next()) |row| {
         defer y += 1;

+        // Get our value from the cache.
+        const gop = try self.cells_lru.getOrPut(self.alloc, row.getId());
+        if (!row.isDirty() and gop.found_existing) {
+            var i: usize = self.cells.items.len;
+            for (gop.value_ptr.items) |cell| {
+                self.cells.appendAssumeCapacity(cell);
+                self.cells.items[i].grid_row = @intCast(u16, y);
+                i += 1;
+            }
+
+            continue;
+        }
+
+        // Get the starting index for our row so we can cache any new GPU cells.
+        const start = self.cells.items.len;
+
         // Split our row into runs and shape each one.
         var iter = self.font_shaper.runIterator(&self.font_group, row);
         while (try iter.next(self.alloc)) |run| {
@@ -383,6 +408,18 @@ pub fn rebuildCells(self: *Grid, term: *Terminal) !void {
                 ));
             }
         }
+
+        // Initialize our list
+        if (!gop.found_existing) gop.value_ptr.* = .{};
+        var row_cells = gop.value_ptr;
+
+        // Get our new length and cache the cells.
+        try row_cells.ensureTotalCapacity(self.alloc, term.screen.cols);
+        row_cells.clearRetainingCapacity();
+        row_cells.appendSliceAssumeCapacity(self.cells.items[start..]);
+
+        // Set row is not dirty anymore
+        row.setDirty(false);
     }

     // Add the cursor
@@ -625,6 +662,12 @@ pub fn setScreenSize(self: *Grid, dim: ScreenSize) !void {
     // Recalculate the rows/columns.
     self.size.update(dim, self.cell_size);

+    // Update our LRU. We arbitrarily support a certain number of pages here.
+    // We also always support a minimum number of caching in case a user
+    // is resizing tiny then growing again we can save some of the renders.
+    const evicted = try self.cells_lru.resize(self.alloc, @maximum(80, self.size.rows * 10));
+    if (evicted) |list| for (list) |*value| value.deinit(self.alloc);
+
     // Update our shaper
     var shape_buf = try self.alloc.alloc(font.Shaper.Cell, self.size.columns * 2);
     errdefer self.alloc.free(shape_buf);
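For a sense of scale (an illustrative check, not code from the commit): @maximum(80, rows * 10) means a typical 24-row terminal caches up to 240 rows, roughly ten screens' worth, while a tiny 5-row window still keeps the 80-row floor, so a shrink-then-grow resize doesn't flush the whole cache. The sketch below spells that out, using std.math.max from the same vintage of the Zig std library in place of the @maximum builtin.

const std = @import("std");

// Illustrative sketch only (not part of this commit).
fn lruCapacity(rows: usize) usize {
    // Mirrors @maximum(80, self.size.rows * 10) in setScreenSize above.
    return std.math.max(@as(usize, 80), rows * 10);
}

test "lru capacity policy" {
    try std.testing.expectEqual(@as(usize, 240), lruCapacity(24)); // ~10 screens of rows
    try std.testing.expectEqual(@as(usize, 80), lruCapacity(5)); // floor for tiny windows
}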
src/lru.zig (330 lines, new file)

@@ -0,0 +1,330 @@
const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;

/// Create a HashMap for a key type that can be automatically hashed.
/// If you want finer-grained control, use HashMap directly.
pub fn AutoHashMap(comptime K: type, comptime V: type) type {
    return HashMap(
        K,
        V,
        std.hash_map.AutoContext(K),
        std.hash_map.default_max_load_percentage,
    );
}

/// HashMap implementation that supports least-recently-used eviction.
///
/// Note: This is a really elementary CS101 version of an LRU right now.
/// This is done initially to get something working. Once we have it working,
/// we can benchmark and improve if this ends up being a source of slowness.
pub fn HashMap(
    comptime K: type,
    comptime V: type,
    comptime Context: type,
    comptime max_load_percentage: u64,
) type {
    return struct {
        const Self = @This();
        const Map = std.HashMapUnmanaged(K, *Queue.Node, Context, max_load_percentage);
        const Queue = std.TailQueue(KV);

        /// Map to maintain our entries.
        map: Map,

        /// Queue to maintain LRU order.
        queue: Queue,

        /// The capacity of our map. If this capacity is reached, cache
        /// misses will begin evicting entries.
        capacity: Map.Size,

        pub const KV = struct {
            key: K,
            value: V,
        };

        /// The result of a getOrPut operation.
        pub const GetOrPutResult = struct {
            /// The entry that was retrieved. If found_existing is false,
            /// then this is a pointer to allocated space to store a V.
            /// If found_existing is true, the pointer value is valid, but
            /// can be overwritten.
            value_ptr: *V,

            /// Whether an existing value was found or not.
            found_existing: bool,

            /// If another entry had to be evicted to make space for this
            /// put operation, then this is the value that was evicted.
            evicted: ?KV,
        };

        pub fn init(capacity: Map.Size) Self {
            return .{
                .map = .{},
                .queue = .{},
                .capacity = capacity,
            };
        }

        pub fn deinit(self: *Self, alloc: Allocator) void {
            // Important: use our queue as a source of truth for dealloc
            // because we might keep items in the queue around that aren't
            // present in our LRU anymore to prevent future allocations.
            var it = self.queue.first;
            while (it) |node| {
                it = node.next;
                alloc.destroy(node);
            }

            self.map.deinit(alloc);
            self.* = undefined;
        }

        /// Get or put a value for a key. See GetOrPutResult on how to check
        /// if an existing value was found, if an existing value was evicted,
        /// etc.
        pub fn getOrPut(self: *Self, allocator: Allocator, key: K) Allocator.Error!GetOrPutResult {
            if (@sizeOf(Context) != 0)
                @compileError("Cannot infer context " ++ @typeName(Context) ++ ", call getOrPutContext instead.");
            return self.getOrPutContext(allocator, key, undefined);
        }

        /// See getOrPut
        pub fn getOrPutContext(
            self: *Self,
            alloc: Allocator,
            key: K,
            ctx: Context,
        ) Allocator.Error!GetOrPutResult {
            const map_gop = try self.map.getOrPutContext(alloc, key, ctx);
            if (map_gop.found_existing) {
                // Move to end to mark as most recently used
                self.queue.remove(map_gop.value_ptr.*);
                self.queue.append(map_gop.value_ptr.*);

                return GetOrPutResult{
                    .found_existing = true,
                    .value_ptr = &map_gop.value_ptr.*.data.value,
                    .evicted = null,
                };
            }
            errdefer _ = self.map.remove(key);

            // We're evicting if our map insertion increased our capacity.
            const evict = self.map.count() > self.capacity;

            // Get our node. If we're not evicting then we allocate a new
            // node. If we are evicting then we avoid allocation by just
            // reusing the node we would've evicted.
            var node = if (!evict) try alloc.create(Queue.Node) else node: {
                // Our first node is the least recently used.
                var least_used = self.queue.first.?;

                // Move our least recently used to the end to make
                // it the most recently used.
                self.queue.remove(least_used);

                // Remove the least used from the map
                _ = self.map.remove(least_used.data.key);

                break :node least_used;
            };
            errdefer if (!evict) alloc.destroy(node);

            // Store our node in the map.
            map_gop.value_ptr.* = node;

            // Mark the node as most recently used
            self.queue.append(node);

            // Set our key
            node.data.key = key;

            return GetOrPutResult{
                .found_existing = map_gop.found_existing,
                .value_ptr = &node.data.value,
                .evicted = if (!evict) null else node.data,
            };
        }

        /// Get a value for a key.
        pub fn get(self: *Self, key: K) ?V {
            if (@sizeOf(Context) != 0) {
                @compileError("getContext must be used.");
            }
            return self.getContext(key, undefined);
        }

        /// See get
        pub fn getContext(self: *Self, key: K, ctx: Context) ?V {
            const node = self.map.getContext(key, ctx) orelse return null;
            return node.data.value;
        }

        /// Resize the LRU. If this shrinks the LRU then LRU items will be
        /// deallocated. The deallocated items are returned in the slice. This
        /// slice must be freed by the caller.
        pub fn resize(self: *Self, alloc: Allocator, capacity: Map.Size) Allocator.Error!?[]V {
            // Fastest
            if (capacity >= self.capacity) {
                self.capacity = capacity;
                return null;
            }

            // If we're shrinking but we're smaller than the new capacity,
            // then we don't have to do anything.
            if (self.map.count() <= capacity) {
                self.capacity = capacity;
                return null;
            }

            // We're shrinking and we have more items than the new capacity
            const delta = self.map.count() - capacity;
            var evicted = try alloc.alloc(V, delta);

            var i: Map.Size = 0;
            while (i < delta) : (i += 1) {
                var node = self.queue.first.?;
                evicted[i] = node.data.value;
                self.queue.remove(node);
                _ = self.map.remove(node.data.key);
                alloc.destroy(node);
            }

            self.capacity = capacity;
            assert(self.map.count() == capacity);

            return evicted;
        }
    };
}

test "getOrPut" {
    const testing = std.testing;
    const alloc = testing.allocator;

    const Map = AutoHashMap(u32, u8);
    var m = Map.init(2);
    defer m.deinit(alloc);

    // Insert cap values, should be hits
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 1;
    }
    {
        const gop = try m.getOrPut(alloc, 2);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 2;
    }

    // 1 is LRU
    try testing.expect((try m.getOrPut(alloc, 1)).found_existing);
    try testing.expect((try m.getOrPut(alloc, 2)).found_existing);

    // Next should evict
    {
        const gop = try m.getOrPut(alloc, 3);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted != null);
        try testing.expect(gop.evicted.?.value == 1);
        gop.value_ptr.* = 3;
    }

    // Currently: 2 is LRU, let's make 3 LRU
    try testing.expect((try m.getOrPut(alloc, 2)).found_existing);

    // Next should evict
    {
        const gop = try m.getOrPut(alloc, 4);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted != null);
        try testing.expect(gop.evicted.?.value == 3);
        gop.value_ptr.* = 4;
    }
}

test "get" {
    const testing = std.testing;
    const alloc = testing.allocator;

    const Map = AutoHashMap(u32, u8);
    var m = Map.init(2);
    defer m.deinit(alloc);

    // Insert cap values, should be hits
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 1;
    }

    try testing.expect(m.get(1) != null);
    try testing.expect(m.get(1).? == 1);
    try testing.expect(m.get(2) == null);
}

test "resize shrink without removal" {
    const testing = std.testing;
    const alloc = testing.allocator;

    const Map = AutoHashMap(u32, u8);
    var m = Map.init(2);
    defer m.deinit(alloc);

    // Insert cap values, LRU is 1
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 1;
    }

    // Shrink
    const evicted = try m.resize(alloc, 1);
    try testing.expect(evicted == null);
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(gop.found_existing);
    }
}

test "resize shrink and remove" {
    const testing = std.testing;
    const alloc = testing.allocator;

    const Map = AutoHashMap(u32, u8);
    var m = Map.init(2);
    defer m.deinit(alloc);

    // Insert cap values, LRU is 1
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 1;
    }
    {
        const gop = try m.getOrPut(alloc, 2);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted == null);
        gop.value_ptr.* = 2;
    }

    // Shrink
    const evicted = try m.resize(alloc, 1);
    defer alloc.free(evicted.?);
    try testing.expectEqual(@as(usize, 1), evicted.?.len);
    {
        const gop = try m.getOrPut(alloc, 1);
        try testing.expect(!gop.found_existing);
        try testing.expect(gop.evicted.?.value == 2);
        gop.value_ptr.* = 1;
    }
}
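A usage note, illustrated with a hypothetical test that would sit next to src/lru.zig (a sketch, not part of the commit): when the cached values own memory, as Grid's std.ArrayListUnmanaged(GPUCell) lists do, the caller must initialize value_ptr on a miss, deinit any value handed back in evicted, and both deinit and free the slice returned by resize, as its doc comment requires.

const std = @import("std");
const lru = @import("lru.zig"); // assumes this sketch lives alongside src/lru.zig

test "lru: caller owns evicted values" {
    const alloc = std.testing.allocator;

    // A cache of heap-owned values, mirroring Grid's ArrayListUnmanaged(GPUCell).
    const Cache = lru.AutoHashMap(u32, std.ArrayListUnmanaged(u8));
    var cache = Cache.init(1); // capacity 1 forces eviction on the second key
    defer cache.deinit(alloc); // frees the nodes/map only; values are ours to manage

    // Miss: the returned slot is uninitialized, so initialize it ourselves.
    {
        const gop = try cache.getOrPut(alloc, 1);
        if (!gop.found_existing) gop.value_ptr.* = .{};
        try gop.value_ptr.append(alloc, 'a');
    }

    // A second key evicts the first. The evicted value comes back to us and
    // we are responsible for deiniting it.
    {
        const gop = try cache.getOrPut(alloc, 2);
        if (!gop.found_existing) gop.value_ptr.* = .{};
        if (gop.evicted) |kv| {
            var old = kv.value;
            old.deinit(alloc);
        }
    }

    // Shrinking hands back the remaining values: deinit each one, then free
    // the returned slice itself.
    if (try cache.resize(alloc, 0)) |evicted| {
        for (evicted) |*value| value.deinit(alloc);
        alloc.free(evicted);
    }
}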
@@ -119,4 +119,5 @@ test {
     // TODO
     _ = @import("config.zig");
     _ = @import("cli_args.zig");
+    _ = @import("lru.zig");
 }
@@ -113,7 +113,7 @@ const StorageCell = union {
 /// The row header is at the start of every row within the storage buffer.
 /// It can store row-specific data.
 pub const RowHeader = struct {
-    const Id = u32;
+    pub const Id = u32;

     /// The ID of this row, used to uniquely identify this row. The cells
     /// are also ID'd by id + cell index (0-indexed). This will wrap around
@@ -127,6 +127,11 @@ pub const RowHeader = struct {
         /// row is a continuous of this row.
         wrap: bool = false,

+        /// True if this row has had changes. It is up to the caller to
+        /// set this to false. See the methods on Row to see what will set
+        /// this to true.
+        dirty: bool = false,
+
        /// True if any cell in this row has a grapheme associated with it.
        grapheme: bool = false,
    } = .{},
@@ -239,7 +244,7 @@ pub const Row = struct {

     /// Returns the ID for this row. You can turn this into a cell ID
     /// by adding the cell offset plus 1 (so it is 1-indexed).
-    pub fn getId(self: Row) RowHeader.Id {
+    pub inline fn getId(self: Row) RowHeader.Id {
         return self.storage[0].header.id;
     }

@@ -249,6 +254,16 @@ pub const Row = struct {
         self.storage[0].header.flags.wrap = v;
     }

+    /// Set a row as dirty or not. Generally you only set a row as NOT dirty.
+    /// Various Row functions manage flagging dirty to true.
+    pub fn setDirty(self: Row, v: bool) void {
+        self.storage[0].header.flags.dirty = v;
+    }
+
+    pub inline fn isDirty(self: Row) bool {
+        return self.storage[0].header.flags.dirty;
+    }
+
     /// Retrieve the header for this row.
     pub fn header(self: Row) RowHeader {
         return self.storage[0].header;
@@ -276,6 +291,9 @@ pub const Row = struct {
         assert(len <= self.storage.len - 1);
         assert(!cell.attrs.grapheme); // you can't fill with graphemes

+        // Always mark the row as dirty for this.
+        self.storage[0].header.flags.dirty = true;
+
         // If our row has no graphemes, then this is a fast copy
         if (!self.storage[0].header.flags.grapheme) {
             std.mem.set(StorageCell, self.storage[start + 1 .. len + 1], .{ .cell = cell });
@@ -308,6 +326,10 @@ pub const Row = struct {
     /// this should be done prior.
     pub fn getCellPtr(self: Row, x: usize) *Cell {
         assert(x < self.storage.len - 1);
+
+        // Always mark the row as dirty for this.
+        self.storage[0].header.flags.dirty = true;
+
         return &self.storage[x + 1].cell;
     }

@@ -323,6 +345,9 @@ pub const Row = struct {
         // Our row now has a grapheme
         self.storage[0].header.flags.grapheme = true;

+        // Our row is now dirty
+        self.storage[0].header.flags.dirty = true;
+
         // If we weren't previously a grapheme and we found an existing value
         // it means that it is old grapheme data. Just delete that.
         if (!cell.attrs.grapheme and gop.found_existing) {
@@ -346,6 +371,9 @@ pub const Row = struct {

     /// Removes all graphemes associated with a cell.
     pub fn clearGraphemes(self: Row, x: usize) void {
+        // Our row is now dirty
+        self.storage[0].header.flags.dirty = true;
+
         const cell = &self.storage[x + 1].cell;
         const key = self.getId() + x + 1;
         cell.attrs.grapheme = false;
@@ -357,6 +385,9 @@ pub const Row = struct {
         // If we have graphemes, clear first to unset them.
         if (self.storage[0].header.flags.grapheme) self.clear(.{});

+        // Always mark the row as dirty for this.
+        self.storage[0].header.flags.dirty = true;
+
         // If the source has no graphemes (likely) then this is fast.
         const end = @minimum(src.storage.len, self.storage.len);
         if (!src.storage[0].header.flags.grapheme) {
@@ -787,6 +818,9 @@ pub fn getRow(self: *Screen, index: RowIndex) Row {
     // Store the header
     row.storage[0].header.id = id;

+    // Mark that we're dirty since we're a new row
+    row.storage[0].header.flags.dirty = true;
+
     // We only need to fill with runtime safety because unions are
     // tag-checked. Otherwise, the default value of zero will be valid.
     if (std.debug.runtime_safety) row.fill(.{});
@@ -2304,6 +2338,123 @@ test "Screen: selectionString wide char with header" {
     }
 }

+test "Screen: dirty with getCellPtr" {
+    const testing = std.testing;
+    const alloc = testing.allocator;
+
+    var s = try init(alloc, 3, 5, 0);
+    defer s.deinit();
+    try s.testWriteString("1ABCD\n2EFGH\n3IJKL");
+    try testing.expect(s.viewportIsBottom());
+
+    // Ensure all are dirty. Clear em.
+    var iter = s.rowIterator(.viewport);
+    while (iter.next()) |row| {
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    // Reset our cursor onto the second row.
+    s.cursor.x = 0;
+    s.cursor.y = 1;
+
+    try s.testWriteString("foo");
+    {
+        const row = s.getRow(.{ .active = 0 });
+        try testing.expect(!row.isDirty());
+    }
+    {
+        const row = s.getRow(.{ .active = 1 });
+        try testing.expect(row.isDirty());
+    }
+    {
+        const row = s.getRow(.{ .active = 2 });
+        try testing.expect(!row.isDirty());
+
+        _ = row.getCell(0);
+        try testing.expect(!row.isDirty());
+    }
+}
+
+test "Screen: dirty with clear, fill, fillSlice, copyRow" {
+    const testing = std.testing;
+    const alloc = testing.allocator;
+
+    var s = try init(alloc, 3, 5, 0);
+    defer s.deinit();
+    try s.testWriteString("1ABCD\n2EFGH\n3IJKL");
+    try testing.expect(s.viewportIsBottom());
+
+    // Ensure all are dirty. Clear em.
+    var iter = s.rowIterator(.viewport);
+    while (iter.next()) |row| {
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    {
+        const row = s.getRow(.{ .active = 0 });
+        try testing.expect(!row.isDirty());
+        row.clear(.{});
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    {
+        const row = s.getRow(.{ .active = 0 });
+        try testing.expect(!row.isDirty());
+        row.fill(.{ .char = 'A' });
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    {
+        const row = s.getRow(.{ .active = 0 });
+        try testing.expect(!row.isDirty());
+        row.fillSlice(.{ .char = 'A' }, 0, 2);
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    {
+        const src = s.getRow(.{ .active = 0 });
+        const row = s.getRow(.{ .active = 1 });
+        try testing.expect(!row.isDirty());
+        try row.copyRow(src);
+        try testing.expect(!src.isDirty());
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+}
+
+test "Screen: dirty with graphemes" {
+    const testing = std.testing;
+    const alloc = testing.allocator;
+
+    var s = try init(alloc, 3, 5, 0);
+    defer s.deinit();
+    try s.testWriteString("1ABCD\n2EFGH\n3IJKL");
+    try testing.expect(s.viewportIsBottom());
+
+    // Ensure all are dirty. Clear em.
+    var iter = s.rowIterator(.viewport);
+    while (iter.next()) |row| {
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+
+    {
+        const row = s.getRow(.{ .active = 0 });
+        try testing.expect(!row.isDirty());
+        try row.attachGrapheme(0, 0xFE0F);
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+        row.clearGraphemes(0);
+        try testing.expect(row.isDirty());
+        row.setDirty(false);
+    }
+}
+
 test "Screen: resize (no reflow) more rows" {
     const testing = std.testing;
     const alloc = testing.allocator;
@@ -2312,13 +2463,22 @@ test "Screen: resize (no reflow) more rows" {
     defer s.deinit();
     const str = "1ABCD\n2EFGH\n3IJKL";
     try s.testWriteString(str);
-    try s.resizeWithoutReflow(10, 5);

+    // Clear dirty rows
+    var iter = s.rowIterator(.viewport);
+    while (iter.next()) |row| row.setDirty(false);
+
+    // Resize
+    try s.resizeWithoutReflow(10, 5);
     {
         var contents = try s.testString(alloc, .viewport);
         defer alloc.free(contents);
         try testing.expectEqualStrings(str, contents);
     }
+
+    // Everything should be dirty
+    iter = s.rowIterator(.viewport);
+    while (iter.next()) |row| try testing.expect(row.isDirty());
 }

 test "Screen: resize (no reflow) less rows" {