Merge pull request #2167 from qwerasd205/insert-delete-lines-fix

Fix `insertLines` and `deleteLines`
Mitchell Hashimoto authored 2024-08-31 10:36:09 -07:00, committed by GitHub
5 changed files with 427 additions and 262 deletions
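The PR description is otherwise empty, so here is a minimal, self-contained sketch of the IL (insert lines) semantics that `insertLines` documents below: rows at and below the cursor shift down within the scroll region and the opened rows are cleared. None of these names are Ghostty APIs; this is illustration only, and `deleteLines` is the mirror image (shift up, clear at the bottom of the region).

```zig
const std = @import("std");

// Toy model: rows are plain strings, insertion shifts rows at-and-below the
// cursor down within the scroll region and clears the newly opened rows.
fn insertLinesModel(rows: [][]const u8, cursor: usize, bottom: usize, count: usize) void {
    const rem = bottom - cursor + 1;
    const adjusted = @min(count, rem);

    // Shift surviving rows down, traversing bottom-up so no row is
    // overwritten before it has been copied.
    var y: usize = bottom;
    while (y >= cursor + adjusted) : (y -= 1) {
        rows[y] = rows[y - adjusted];
        if (y == 0) break;
    }

    // The opened rows become blank.
    for (rows[cursor .. cursor + adjusted]) |*row| row.* = "";
}

pub fn main() void {
    var rows = [_][]const u8{ "A", "B", "C", "D", "E" };
    insertLinesModel(&rows, 1, 4, 2); // cursor on row 1, scroll bottom at row 4
    // rows is now: A, "", "", B, C (D and E fell off the bottom of the region).
    for (rows) |r| std.debug.print("{s}\n", .{r});
}
```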

View File

@@ -1758,6 +1758,8 @@ pub const AdjustCapacity = struct {
string_bytes: ?usize = null,
};
pub const AdjustCapacityError = Allocator.Error || Page.CloneFromError;
/// Adjust the capacity of the given page in the list. This should
/// be used in cases where OutOfMemory is returned by some operation,
/// i.e. to increase style counts, grapheme counts, etc.
@@ -1778,25 +1780,31 @@ pub fn adjustCapacity(
self: *PageList,
page: *List.Node,
adjustment: AdjustCapacity,
) !*List.Node {
) AdjustCapacityError!*List.Node {
// We always start with the base capacity of the existing page. This
// ensures we never shrink from what we need.
var cap = page.data.capacity;
// All ceilPowerOfTwo error cases are unreachable because we're always
// the same or a smaller bit width, so maxInt is always possible.
if (adjustment.styles) |v| {
const aligned = try std.math.ceilPowerOfTwo(usize, v);
comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
cap.styles = @max(cap.styles, aligned);
}
if (adjustment.grapheme_bytes) |v| {
const aligned = try std.math.ceilPowerOfTwo(usize, v);
comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
cap.grapheme_bytes = @max(cap.grapheme_bytes, aligned);
}
if (adjustment.hyperlink_bytes) |v| {
const aligned = try std.math.ceilPowerOfTwo(usize, v);
comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
cap.hyperlink_bytes = @max(cap.hyperlink_bytes, aligned);
}
if (adjustment.string_bytes) |v| {
const aligned = try std.math.ceilPowerOfTwo(usize, v);
comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
cap.string_bytes = @max(cap.string_bytes, aligned);
}
@@ -1830,7 +1838,7 @@ pub fn adjustCapacity(
fn createPage(
self: *PageList,
cap: Capacity,
) !*List.Node {
) Allocator.Error!*List.Node {
// log.debug("create page cap={}", .{cap});
return try createPageExt(&self.pool, cap, &self.page_size);
}
@@ -1839,7 +1847,7 @@ fn createPageExt(
pool: *MemoryPool,
cap: Capacity,
total_size: ?*usize,
) !*List.Node {
) Allocator.Error!*List.Node {
var page = try pool.nodes.create();
errdefer pool.nodes.destroy(page);
@@ -2292,7 +2300,7 @@ pub fn pin(self: *const PageList, pt: point.Point) ?Pin {
/// automatically updated as the pagelist is modified. If the point the
/// pin points to is removed completely, the tracked pin will be updated
/// to the top-left of the screen.
pub fn trackPin(self: *PageList, p: Pin) !*Pin {
pub fn trackPin(self: *PageList, p: Pin) Allocator.Error!*Pin {
if (comptime std.debug.runtime_safety) assert(self.pinIsValid(p));
// Create our tracked pin

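A side note on the `adjustCapacity` hunk above: each requested count is rounded up to the next power of two and the result never shrinks what the page already has. A tiny sketch of that arithmetic using only `std.math`; the concrete numbers are made up:

```zig
const std = @import("std");

pub fn main() !void {
    const existing: usize = 1024; // say, the page's current grapheme_bytes capacity
    const requested: usize = 1500; // what the caller asked for via AdjustCapacity

    // Same rounding adjustCapacity performs per field: up to the next power of two.
    const aligned = try std.math.ceilPowerOfTwo(usize, requested); // 2048
    // Never shrink below the existing capacity.
    const new_cap = @max(existing, aligned);

    std.debug.print("aligned={} new_cap={}\n", .{ aligned, new_cap });
}
```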
View File

@@ -433,11 +433,11 @@ pub fn clonePool(
/// Adjust the capacity of a page within the pagelist of this screen.
/// This handles some accounting if the page being modified is the
/// cursor page.
fn adjustCapacity(
pub fn adjustCapacity(
self: *Screen,
page: *PageList.List.Node,
adjustment: PageList.AdjustCapacity,
) !*PageList.List.Node {
) PageList.AdjustCapacityError!*PageList.List.Node {
// If the page being modified isn't our cursor page then
// this is a quick operation because we have no additional
// accounting.
@@ -1792,7 +1792,7 @@ pub fn cursorSetHyperlink(self: *Screen) !void {
return;
} else |err| switch (err) {
// hyperlink_map is out of space, realloc the page to be larger
error.OutOfMemory => {
error.HyperlinkMapOutOfMemory => {
_ = try self.adjustCapacity(
self.cursor.page_pin.page,
.{ .hyperlink_bytes = page.capacity.hyperlink_bytes * 2 },

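The `cursorSetHyperlink` hunk above shows the pattern this whole PR builds on: a page-level operation reports exactly which backing buffer is full, the caller grows only that buffer via `adjustCapacity`, and then retries. A stand-alone sketch of that shape; `tryOp` and the byte counts are hypothetical, not the real Screen/Page API:

```zig
const std = @import("std");

const PageError = error{ HyperlinkMapOutOfMemory, StyleSetOutOfMemory };

// Pretend page operation: fails until the hyperlink map is large enough.
fn tryOp(hyperlink_bytes: usize) PageError!void {
    if (hyperlink_bytes < 2048) return error.HyperlinkMapOutOfMemory;
}

pub fn main() void {
    var hyperlink_bytes: usize = 512;
    while (true) {
        tryOp(hyperlink_bytes) catch |err| switch (err) {
            // Out of hyperlink map space: double it and retry, mirroring
            // the adjustCapacity call in Screen.cursorSetHyperlink.
            error.HyperlinkMapOutOfMemory => {
                hyperlink_bytes *= 2;
                continue;
            },
            // Not produced by this toy operation.
            error.StyleSetOutOfMemory => unreachable,
        };
        break;
    }
    std.debug.print("settled at hyperlink_bytes={}\n", .{hyperlink_bytes});
}
```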
View File

@@ -1404,6 +1404,11 @@ fn rowWillBeShifted(
}
}
// TODO(qwerasd): `insertLines` and `deleteLines` are 99% identical,
// the majority of their logic can (and should) be abstracted into
// a single shared helper function, probably on `Screen` not here.
// I'm just too lazy to do that rn :p
/// Insert amount lines at the current cursor row. The contents of the line
/// at the current cursor row and below (to the bottom-most line in the
/// scrolling region) are shifted down by amount lines. The contents of the
@@ -1435,6 +1440,19 @@ pub fn insertLines(self: *Terminal, count: usize) void {
// Scrolling dirties the images because it updates their placements' pins.
self.screen.kitty_images.dirty = true;
// At the end we need to return the cursor to the row it started on.
const start_y = self.screen.cursor.y;
defer {
self.screen.cursorAbsolute(self.scrolling_region.left, start_y);
// Always unset pending wrap
self.screen.cursor.pending_wrap = false;
}
// We have a slower path if we have left or right scroll margins.
const left_right = self.scrolling_region.left > 0 or
self.scrolling_region.right < self.cols - 1;
// Remaining rows from our cursor to the bottom of the scroll region.
const rem = self.scrolling_region.bottom - self.screen.cursor.y + 1;
@@ -1442,121 +1460,159 @@ pub fn insertLines(self: *Terminal, count: usize) void {
// region. So we take whichever is smaller.
const adjusted_count = @min(count, rem);
// top is just the cursor position. insertLines starts at the cursor
// so this is our top. We want to shift lines down, down to the bottom
// of the scroll region.
const top = self.screen.cursor.page_pin.*;
// Create a new tracked pin which we'll use to navigate the page list
// so that if we need to adjust capacity it will be properly tracked.
var cur_p = self.screen.pages.trackPin(
self.screen.cursor.page_pin.down(rem - 1).?,
) catch |err| {
comptime assert(@TypeOf(err) == error{OutOfMemory});
// This is the amount of space at the bottom of the scroll region
// that will NOT be blank, so we need to shift the correct lines down.
// "scroll_amount" is the number of such lines.
const scroll_amount = rem - adjusted_count;
if (scroll_amount > 0) {
// If we have left/right scroll margins we have a slower path.
const left_right = self.scrolling_region.left > 0 or
self.scrolling_region.right < self.cols - 1;
// This error scenario means that our GPA is OOM. This is not a
// situation we can gracefully handle. We can't just ignore insertLines
// because it'll result in a corrupted screen. Ideally in the future
// we flag the state as broken and show an error message to the user.
// For now, we panic.
log.err("insertLines trackPin error err={}", .{err});
@panic("insertLines trackPin OOM");
};
defer self.screen.pages.untrackPin(cur_p);
const bot = top.down(scroll_amount - 1).?;
var it = bot.rowIterator(.left_up, top);
while (it.next()) |p| {
const dst_p = p.down(adjusted_count).?;
const src_rac = p.rowAndCell();
const dst_rac = dst_p.rowAndCell();
const src: *Row = src_rac.row;
const dst: *Row = dst_rac.row;
// Our current y position relative to the cursor
var y: usize = rem;
self.rowWillBeShifted(&p.page.data, src);
self.rowWillBeShifted(&dst_p.page.data, dst);
// Traverse from the bottom up
while (y > 0) {
const cur_rac = cur_p.rowAndCell();
const cur_row: *Row = cur_rac.row;
// Mark both our src/dst as dirty
p.markDirty();
dst_p.markDirty();
// Mark the row as dirty
cur_p.markDirty();
// If this is one of the lines we need to shift, do so
if (y > adjusted_count) {
const off_p = cur_p.up(adjusted_count).?;
const off_rac = off_p.rowAndCell();
const off_row: *Row = off_rac.row;
self.rowWillBeShifted(&cur_p.page.data, cur_row);
self.rowWillBeShifted(&off_p.page.data, off_row);
// If our scrolling region is full width, then we unset wrap.
if (!left_right) {
dst.wrap = false;
src.wrap = false;
dst.wrap_continuation = false;
src.wrap_continuation = false;
off_row.wrap = false;
cur_row.wrap = false;
off_row.wrap_continuation = false;
cur_row.wrap_continuation = false;
}
const src_p = off_p;
const src_row = off_row;
const dst_p = cur_p;
const dst_row = cur_row;
// If our page doesn't match, then we need to do a copy from
// one page to another. This is the slow path.
if (dst_p.page != p.page) {
if (src_p.page != dst_p.page) {
dst_p.page.data.clonePartialRowFrom(
&p.page.data,
dst,
src,
&src_p.page.data,
dst_row,
src_row,
self.scrolling_region.left,
self.scrolling_region.right + 1,
) catch |err| {
std.log.warn("TODO: insertLines handle clone error err={}", .{err});
@panic("TODO");
const cap = dst_p.page.data.capacity;
// Adjust our page capacity to make
// room for what we didn't have space for.
_ = self.screen.adjustCapacity(
dst_p.page,
switch (err) {
// Rehash the sets
error.StyleSetNeedsRehash,
error.HyperlinkSetNeedsRehash,
=> .{},
// Increase style memory
error.StyleSetOutOfMemory,
=> .{ .styles = cap.styles * 2 },
// Increase string memory
error.StringAllocOutOfMemory,
=> .{ .string_bytes = cap.string_bytes * 2 },
// Increase hyperlink memory
error.HyperlinkSetOutOfMemory,
error.HyperlinkMapOutOfMemory,
=> .{ .hyperlink_bytes = cap.hyperlink_bytes * 2 },
// Increase grapheme memory
error.GraphemeMapOutOfMemory,
error.GraphemeAllocOutOfMemory,
=> .{ .grapheme_bytes = cap.grapheme_bytes * 2 },
},
) catch |e| switch (e) {
// This shouldn't be possible because above we're only
// adjusting capacity _upwards_. So it should have all
// the existing capacity it had to fit the adjusted
// data. Panic since we don't expect this.
error.StyleSetOutOfMemory,
error.StyleSetNeedsRehash,
error.StringAllocOutOfMemory,
error.HyperlinkSetOutOfMemory,
error.HyperlinkSetNeedsRehash,
error.HyperlinkMapOutOfMemory,
error.GraphemeMapOutOfMemory,
error.GraphemeAllocOutOfMemory,
=> @panic("adjustCapacity resulted in capacity errors"),
// The system allocator is OOM. We can't currently do
// anything graceful here. We panic.
error.OutOfMemory,
=> @panic("adjustCapacity system allocator OOM"),
};
// Continue the loop to try handling this row again.
continue;
}
};
} else {
if (!left_right) {
// Swap the src/dst cells. This ensures that our dst gets the proper
// shifted rows and src gets non-garbage cell data that we can clear.
const dst_row = dst.*;
dst.* = src.*;
src.* = dst_row;
// Swap the src/dst cells. This ensures that our dst gets the
// proper shifted rows and src gets non-garbage cell data that
// we can clear.
const dst = dst_row.*;
dst_row.* = src_row.*;
src_row.* = dst;
// Ensure what we did didn't corrupt the page
p.page.data.assertIntegrity();
continue;
}
// Left/right scroll margins we have to copy cells, which is much slower...
const page = &p.page.data;
cur_p.page.data.assertIntegrity();
} else {
// Left/right scroll margins we have to
// copy cells, which is much slower...
const page = &cur_p.page.data;
page.moveCells(
src,
src_row,
self.scrolling_region.left,
dst,
dst_row,
self.scrolling_region.left,
(self.scrolling_region.right - self.scrolling_region.left) + 1,
);
}
// The operations above can prune our cursor style so we need to
// update. This should never fail because the above can only FREE
// memory.
self.screen.manualStyleUpdate() catch |err| {
std.log.warn("deleteLines manualStyleUpdate err={}", .{err});
self.screen.cursor.style = .{};
self.screen.manualStyleUpdate() catch unreachable;
};
}
// Inserted lines should keep our bg color
const bot = top.down(adjusted_count - 1).?;
var it = top.rowIterator(.right_down, bot);
while (it.next()) |p| {
const row: *Row = p.rowAndCell().row;
// This row is now dirty
p.markDirty();
// Clear the src row.
const page = &p.page.data;
const cells = page.getCells(row);
const cells_write = cells[self.scrolling_region.left .. self.scrolling_region.right + 1];
self.screen.clearCells(page, row, cells_write);
}
// Move the cursor to the left margin. But importantly this also
// forces screen.cursor.page_cell to reload because the rows above
// shifted cell offsets so this will ensure the cursor is pointing
// to the correct cell.
self.screen.cursorAbsolute(
self.scrolling_region.left,
self.screen.cursor.y,
} else {
// Clear the cells for this row, it has been shifted.
const page = &cur_p.page.data;
const cells = page.getCells(cur_row);
self.screen.clearCells(
page,
cur_row,
cells[self.scrolling_region.left .. self.scrolling_region.right + 1],
);
}
// Always unset pending wrap
self.screen.cursor.pending_wrap = false;
// We have successfully processed a line.
y -= 1;
// Move our pin up to the next row.
if (cur_p.up(1)) |p| cur_p.* = p;
}
}
/// Removes amount lines from the current cursor row down. The remaining lines
@@ -1575,9 +1631,9 @@ pub fn insertLines(self: *Terminal, count: usize) void {
/// cleared space is colored according to the current SGR state.
///
/// Moves the cursor to the left margin.
pub fn deleteLines(self: *Terminal, count_req: usize) void {
pub fn deleteLines(self: *Terminal, count: usize) void {
// Rare, but happens
if (count_req == 0) return;
if (count == 0) return;
// If the cursor is outside the scroll region we do nothing.
if (self.screen.cursor.y < self.scrolling_region.top or
@@ -1588,125 +1644,168 @@ pub fn deleteLines(self: *Terminal, count_req: usize) void {
// Scrolling dirties the images because it updates their placements' pins.
self.screen.kitty_images.dirty = true;
// top is just the cursor position. insertLines starts at the cursor
// so this is our top. We want to shift lines down, down to the bottom
// of the scroll region.
const top = self.screen.cursor.page_pin.*;
// At the end we need to return the cursor to the row it started on.
const start_y = self.screen.cursor.y;
defer {
self.screen.cursorAbsolute(self.scrolling_region.left, start_y);
// Always unset pending wrap
self.screen.cursor.pending_wrap = false;
}
// We have a slower path if we have left or right scroll margins.
const left_right = self.scrolling_region.left > 0 or
self.scrolling_region.right < self.cols - 1;
// Remaining rows from our cursor to the bottom of the scroll region.
const rem = self.scrolling_region.bottom - self.screen.cursor.y + 1;
// The maximum we can delete is the remaining lines in the scroll region.
const count = @min(count_req, rem);
// We can only delete lines up to our remaining lines in the scroll
// region. So we take whichever is smaller.
const adjusted_count = @min(count, rem);
// This is the amount of space at the bottom of the scroll region
// that will NOT be blank, so we need to shift the correct lines down.
// "scroll_amount" is the number of such lines.
const scroll_amount = rem - count;
if (scroll_amount > 0) {
// If we have left/right scroll margins we have a slower path.
const left_right = self.scrolling_region.left > 0 or
self.scrolling_region.right < self.cols - 1;
// Create a new tracked pin which we'll use to navigate the page list
// so that if we need to adjust capacity it will be properly tracked.
var cur_p = self.screen.pages.trackPin(
self.screen.cursor.page_pin.*,
) catch |err| {
// See insertLines
comptime assert(@TypeOf(err) == error{OutOfMemory});
log.err("deleteLines trackPin error err={}", .{err});
@panic("deleteLines trackPin OOM");
};
defer self.screen.pages.untrackPin(cur_p);
const bot = top.down(scroll_amount - 1).?;
var it = top.rowIterator(.right_down, bot);
while (it.next()) |p| {
const src_p = p.down(count).?;
const src_rac = src_p.rowAndCell();
const dst_rac = p.rowAndCell();
const src: *Row = src_rac.row;
const dst: *Row = dst_rac.row;
// Our current y position relative to the cursor
var y: usize = 0;
// Mark both our src/dst as dirty
p.markDirty();
src_p.markDirty();
// Traverse from the top down
while (y < rem) {
const cur_rac = cur_p.rowAndCell();
const cur_row: *Row = cur_rac.row;
self.rowWillBeShifted(&src_p.page.data, src);
self.rowWillBeShifted(&p.page.data, dst);
// Mark the row as dirty
cur_p.markDirty();
// If this is one of the lines we need to shift, do so
if (y < rem - adjusted_count) {
const off_p = cur_p.down(adjusted_count).?;
const off_rac = off_p.rowAndCell();
const off_row: *Row = off_rac.row;
self.rowWillBeShifted(&cur_p.page.data, cur_row);
self.rowWillBeShifted(&off_p.page.data, off_row);
// If our scrolling region is full width, then we unset wrap.
if (!left_right) {
dst.wrap = false;
src.wrap = false;
dst.wrap_continuation = false;
src.wrap_continuation = false;
off_row.wrap = false;
cur_row.wrap = false;
off_row.wrap_continuation = false;
cur_row.wrap_continuation = false;
}
if (src_p.page != p.page) {
p.page.data.clonePartialRowFrom(
const src_p = off_p;
const src_row = off_row;
const dst_p = cur_p;
const dst_row = cur_row;
// If our page doesn't match, then we need to do a copy from
// one page to another. This is the slow path.
if (src_p.page != dst_p.page) {
dst_p.page.data.clonePartialRowFrom(
&src_p.page.data,
dst,
src,
dst_row,
src_row,
self.scrolling_region.left,
self.scrolling_region.right + 1,
) catch |err| {
std.log.warn("TODO: deleteLines handle clone error err={}", .{err});
@panic("TODO");
const cap = dst_p.page.data.capacity;
// Adjust our page capacity to make
// room for what we didn't have space for.
_ = self.screen.adjustCapacity(
dst_p.page,
switch (err) {
// Rehash the sets
error.StyleSetNeedsRehash,
error.HyperlinkSetNeedsRehash,
=> .{},
// Increase style memory
error.StyleSetOutOfMemory,
=> .{ .styles = cap.styles * 2 },
// Increase string memory
error.StringAllocOutOfMemory,
=> .{ .string_bytes = cap.string_bytes * 2 },
// Increase hyperlink memory
error.HyperlinkSetOutOfMemory,
error.HyperlinkMapOutOfMemory,
=> .{ .hyperlink_bytes = cap.hyperlink_bytes * 2 },
// Increase grapheme memory
error.GraphemeMapOutOfMemory,
error.GraphemeAllocOutOfMemory,
=> .{ .grapheme_bytes = cap.grapheme_bytes * 2 },
},
) catch |e| switch (e) {
// See insertLines which has the same error capture.
error.StyleSetOutOfMemory,
error.StyleSetNeedsRehash,
error.StringAllocOutOfMemory,
error.HyperlinkSetOutOfMemory,
error.HyperlinkSetNeedsRehash,
error.HyperlinkMapOutOfMemory,
error.GraphemeMapOutOfMemory,
error.GraphemeAllocOutOfMemory,
=> @panic("adjustCapacity resulted in capacity errors"),
error.OutOfMemory,
=> @panic("adjustCapacity system allocator OOM"),
};
// Continue the loop to try handling this row again.
continue;
}
};
} else {
if (!left_right) {
// Swap the src/dst cells. This ensures that our dst gets the proper
// shifted rows and src gets non-garbage cell data that we can clear.
const dst_row = dst.*;
dst.* = src.*;
src.* = dst_row;
// Swap the src/dst cells. This ensures that our dst gets the
// proper shifted rows and src gets non-garbage cell data that
// we can clear.
const dst = dst_row.*;
dst_row.* = src_row.*;
src_row.* = dst;
// Ensure what we did didn't corrupt the page
p.page.data.assertIntegrity();
continue;
}
// Left/right scroll margins we have to copy cells, which is much slower...
const page = &p.page.data;
cur_p.page.data.assertIntegrity();
} else {
// Left/right scroll margins we have to
// copy cells, which is much slower...
const page = &cur_p.page.data;
page.moveCells(
src,
src_row,
self.scrolling_region.left,
dst,
dst_row,
self.scrolling_region.left,
(self.scrolling_region.right - self.scrolling_region.left) + 1,
);
}
// The operations above can prune our cursor style so we need to
// update. This should never fail because the above can only FREE
// memory.
self.screen.manualStyleUpdate() catch |err| {
std.log.warn("deleteLines manualStyleUpdate err={}", .{err});
self.screen.cursor.style = .{};
self.screen.manualStyleUpdate() catch unreachable;
};
}
const clear_top = top.down(scroll_amount).?;
const bot = top.down(rem - 1).?;
var it = clear_top.rowIterator(.right_down, bot);
while (it.next()) |p| {
const row: *Row = p.rowAndCell().row;
// This row is now dirty
p.markDirty();
// Clear the src row.
const page = &p.page.data;
const cells = page.getCells(row);
const cells_write = cells[self.scrolling_region.left .. self.scrolling_region.right + 1];
self.screen.clearCells(page, row, cells_write);
}
// Move the cursor to the left margin. But importantly this also
// forces screen.cursor.page_cell to reload because the rows above
// shifted cell offsets so this will ensure the cursor is pointing
// to the correct cell.
self.screen.cursorAbsolute(
self.scrolling_region.left,
self.screen.cursor.y,
} else {
// Clear the cells for this row; its new contents would come from out of bounds.
const page = &cur_p.page.data;
const cells = page.getCells(cur_row);
self.screen.clearCells(
page,
cur_row,
cells[self.scrolling_region.left .. self.scrolling_region.right + 1],
);
}
// Always unset pending wrap
self.screen.cursor.pending_wrap = false;
// We have successfully processed a line.
y += 1;
// Move our pin down to the next row.
if (cur_p.down(1)) |p| cur_p.* = p;
}
}
/// Inserts spaces at current cursor position moving existing cell contents

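What changed structurally in `insertLines`/`deleteLines` above: the old code iterated over pins captured up front, which a mid-loop `adjustCapacity` could invalidate, while the new code keeps one tracked pin plus a row counter and simply `continue`s to retry the same row after recovering. A stand-alone model of that control flow; all names are illustrative, not Ghostty code:

```zig
const std = @import("std");

// Pretend per-row operation: row 2 fails once before succeeding, standing in
// for clonePartialRowFrom hitting a capacity error on its first attempt.
fn processRow(row: usize, attempts: *usize) error{NeedsMoreCapacity}!void {
    if (row == 2 and attempts.* == 0) {
        attempts.* += 1;
        return error.NeedsMoreCapacity;
    }
}

pub fn main() void {
    var attempts: usize = 0;
    var y: usize = 5; // rows remaining, traversed bottom-up like insertLines
    var row: usize = 4;
    while (y > 0) {
        processRow(row, &attempts) catch {
            // In the real code this is where adjustCapacity grows the page.
            // Then retry the *same* row: y and the position do not advance.
            continue;
        };
        // Row handled successfully; move on.
        y -= 1;
        if (row > 0) row -= 1;
    }
    std.debug.print("done, retried {} time(s)\n", .{attempts});
}
```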
View File

@@ -611,7 +611,27 @@ pub const Page = struct {
return result;
}
pub const CloneFromError = Allocator.Error || style.Set.AddError;
pub const StyleSetError = error{
StyleSetOutOfMemory,
StyleSetNeedsRehash,
};
pub const HyperlinkError = error{
StringAllocOutOfMemory,
HyperlinkSetOutOfMemory,
HyperlinkSetNeedsRehash,
HyperlinkMapOutOfMemory,
};
pub const GraphemeError = error{
GraphemeMapOutOfMemory,
GraphemeAllocOutOfMemory,
};
pub const CloneFromError =
StyleSetError ||
HyperlinkError ||
GraphemeError;
/// Clone the contents of another page into this page. The capacities
/// can be different, but the size of the other page must fit into
@@ -688,7 +708,7 @@ pub const Page = struct {
const cells = dst_row.cells.ptr(self.memory)[x_start..x_end];
// If our destination has styles or graphemes then we need to
// clear some state.
// clear some state. This will free up the managed memory as well.
if (dst_row.managedMemory()) self.clearCells(dst_row, x_start, x_end);
// Copy all the row metadata but keep our cells offset
@@ -731,6 +751,16 @@ pub const Page = struct {
// get all of that right.
for (cells, other_cells) |*dst_cell, *src_cell| {
dst_cell.* = src_cell.*;
// Reset any managed memory markers on the cell so that we don't
// hit an integrity check if we have to return an error because
// the page can't fit the new memory.
dst_cell.hyperlink = false;
dst_cell.style_id = style.default_id;
if (dst_cell.content_tag == .codepoint_grapheme) {
dst_cell.content_tag = .codepoint;
}
if (src_cell.hasGrapheme()) {
// To prevent integrity checks flipping. This will
// get fixed up when we check the style id below.
@@ -738,13 +768,14 @@ pub const Page = struct {
dst_cell.style_id = style.default_id;
}
dst_cell.content_tag = .codepoint; // required for appendGrapheme
// Copy the grapheme codepoints
const cps = other.lookupGrapheme(src_cell).?;
for (cps) |cp| try self.appendGrapheme(dst_row, dst_cell, cp);
// Safe to use setGraphemes because we cleared all
// managed memory for our destination cell range.
try self.setGraphemes(dst_row, dst_cell, cps);
}
if (src_cell.hyperlink) hyperlink: {
dst_row.hyperlink = true;
const id = other.lookupHyperlink(src_cell).?;
// Fast-path: same page we can add with the same id.
@@ -757,55 +788,61 @@ pub const Page = struct {
// Slow-path: get the hyperlink from the other page,
// add it, and migrate.
const dst_link = dst_link: {
// Fast path is we just dupe the hyperlink because
// it doesn't require traversal through the hyperlink
// map. If the add below already contains it then it'll
// call the deleted context callback and we'll free
// this back.
// If our page can't support an additional cell with
// a hyperlink then we have to return an error.
if (self.hyperlinkCount() >= self.hyperlinkCapacity() - 1) {
// The hyperlink map capacity needs to be increased.
return error.HyperlinkMapOutOfMemory;
}
const other_link = other.hyperlink_set.get(other.memory, id);
if (other_link.dupe(other, self)) |dst_link| {
break :dst_link dst_link;
} else |err| switch (err) {
// If this happens, the only possible valid outcome is
// that it is because this link is already in our set
// and our memory is full because we already have it.
// Any other outcome is an integrity violation.
error.OutOfMemory => {},
}
// Slow, the only way to really find our link is to
// traverse over the map, which includes dupes...
const dst_map = self.hyperlink_map.map(self.memory);
var it = dst_map.valueIterator();
while (it.next()) |existing_id| {
const existing_link = self.hyperlink_set.get(
const dst_id = dst_id: {
// First check if the link already exists in our page,
// and increment its refcount if so, since we're about
// to use it.
if (self.hyperlink_set.lookupContext(
self.memory,
existing_id.*,
);
other_link.*,
if (existing_link.eql(
self.memory,
other_link,
other.memory,
)) {
break :dst_link existing_link.*;
}
// `lookupContext` uses the context for hashing, and
// that doesn't write to the page, so this constCast
// is completely safe.
.{ .page = @constCast(other) },
)) |i| {
self.hyperlink_set.use(self.memory, i);
break :dst_id i;
}
// There is no other valid scenario where we don't
// have the memory to dupe a hyperlink since we allocate
// cloned pages with enough capacity to contain their
// contents.
unreachable;
// If we don't have this link in our page yet then
// we need to clone it over and add it to our set.
// Clone the link.
const dst_link = other_link.dupe(other, self) catch |e| {
comptime assert(@TypeOf(e) == error{OutOfMemory});
// The string alloc capacity needs to be increased.
return error.StringAllocOutOfMemory;
};
const dst_id = try self.hyperlink_set.addWithIdContext(
// Add it, preferring to use the same ID as the other
// page, since this *probably* speeds up full-page
// clones.
//
// TODO(qwerasd): verify the assumption that `addWithId`
// is ever actually useful, I think it may not be.
break :dst_id self.hyperlink_set.addWithIdContext(
self.memory,
dst_link,
id,
.{ .page = self },
) orelse id;
) catch |e| switch (e) {
// The hyperlink set capacity needs to be increased.
error.OutOfMemory => return error.HyperlinkSetOutOfMemory,
// The hyperlink set needs to be rehashed.
error.NeedsRehash => return error.HyperlinkSetNeedsRehash,
} orelse id;
};
try self.setHyperlink(dst_row, dst_cell, dst_id);
}
if (src_cell.style_id != style.default_id) style: {
@@ -822,11 +859,17 @@ pub const Page = struct {
// Slow path: Get the style from the other
// page and add it to this page's style set.
const other_style = other.styles.get(other.memory, src_cell.style_id);
dst_cell.style_id = try self.styles.addWithId(
dst_cell.style_id = self.styles.addWithId(
self.memory,
other_style.*,
src_cell.style_id,
) orelse src_cell.style_id;
) catch |e| switch (e) {
// The style set capacity needs to be increased.
error.OutOfMemory => return error.StyleSetOutOfMemory,
// The style set needs to be rehashed.
error.NeedsRehash => return error.StyleSetNeedsRehash,
} orelse src_cell.style_id;
}
if (src_cell.codepoint() == kitty.graphics.unicode.placeholder) {
dst_row.kitty_virtual_placeholder = true;
@@ -1090,12 +1133,16 @@ pub const Page = struct {
/// Caller is responsible for updating the refcount in the hyperlink
/// set as necessary by calling `use` if the id was not acquired with
/// `add`.
pub fn setHyperlink(self: *Page, row: *Row, cell: *Cell, id: hyperlink.Id) !void {
pub fn setHyperlink(self: *Page, row: *Row, cell: *Cell, id: hyperlink.Id) error{HyperlinkMapOutOfMemory}!void {
defer self.assertIntegrity();
const cell_offset = getOffset(Cell, self.memory, cell);
var map = self.hyperlink_map.map(self.memory);
const gop = try map.getOrPut(cell_offset);
const gop = map.getOrPut(cell_offset) catch |e| {
comptime assert(@TypeOf(e) == error{OutOfMemory});
// The hyperlink map capacity needs to be increased.
return error.HyperlinkMapOutOfMemory;
};
if (gop.found_existing) {
// Always release the old hyperlink, because even if it's actually
@@ -1160,7 +1207,7 @@ pub const Page = struct {
/// Set the graphemes for the given cell. This asserts that the cell
/// has no graphemes set, and only contains a single codepoint.
pub fn setGraphemes(self: *Page, row: *Row, cell: *Cell, cps: []u21) Allocator.Error!void {
pub fn setGraphemes(self: *Page, row: *Row, cell: *Cell, cps: []u21) GraphemeError!void {
defer self.assertIntegrity();
assert(cell.codepoint() > 0);
@@ -1169,14 +1216,22 @@ pub const Page = struct {
const cell_offset = getOffset(Cell, self.memory, cell);
var map = self.grapheme_map.map(self.memory);
const slice = try self.grapheme_alloc.alloc(u21, self.memory, cps.len);
const slice = self.grapheme_alloc.alloc(u21, self.memory, cps.len) catch |e| {
comptime assert(@TypeOf(e) == error{OutOfMemory});
// The grapheme alloc capacity needs to be increased.
return error.GraphemeAllocOutOfMemory;
};
errdefer self.grapheme_alloc.free(self.memory, slice);
@memcpy(slice, cps);
try map.putNoClobber(cell_offset, .{
map.putNoClobber(cell_offset, .{
.offset = getOffset(u21, self.memory, @ptrCast(slice.ptr)),
.len = slice.len,
});
}) catch |e| {
comptime assert(@TypeOf(e) == error{OutOfMemory});
// The grapheme map capacity needs to be increased.
return error.GraphemeMapOutOfMemory;
};
errdefer map.remove(cell_offset);
cell.content_tag = .codepoint_grapheme;

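The new error sets in page.zig replace a bare `Allocator.Error` so callers can tell exactly which capacity to grow. A small stand-alone sketch of switching exhaustively on the composed set; the error names match the diff, but `handle` and the string results are placeholders:

```zig
const std = @import("std");

const StyleSetError = error{ StyleSetOutOfMemory, StyleSetNeedsRehash };
const HyperlinkError = error{
    StringAllocOutOfMemory,
    HyperlinkSetOutOfMemory,
    HyperlinkSetNeedsRehash,
    HyperlinkMapOutOfMemory,
};
const GraphemeError = error{ GraphemeMapOutOfMemory, GraphemeAllocOutOfMemory };
const CloneFromError = StyleSetError || HyperlinkError || GraphemeError;

// Map each failure onto the capacity adjustment a caller would make,
// mirroring the switch that insertLines/deleteLines perform.
fn handle(err: CloneFromError) []const u8 {
    return switch (err) {
        error.StyleSetNeedsRehash, error.HyperlinkSetNeedsRehash => "rehash only",
        error.StyleSetOutOfMemory => "grow styles",
        error.StringAllocOutOfMemory => "grow string_bytes",
        error.HyperlinkSetOutOfMemory, error.HyperlinkMapOutOfMemory => "grow hyperlink_bytes",
        error.GraphemeMapOutOfMemory, error.GraphemeAllocOutOfMemory => "grow grapheme_bytes",
    };
}

pub fn main() void {
    std.debug.print("{s}\n", .{handle(error.GraphemeMapOutOfMemory)});
}
```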
View File

@@ -227,7 +227,7 @@ pub fn RefCountedSet(
}
// If the item already exists, return it.
if (self.lookup(base, value, ctx)) |id| {
if (self.lookupContext(base, value, ctx)) |id| {
// Notify the context that the value is "deleted" because
// we're reusing the existing value in the set. This allows
// callers to clean up any resources associated with the value.
@@ -453,7 +453,10 @@ pub fn RefCountedSet(
/// Find an item in the table and return its ID.
/// If the item does not exist in the table, null is returned.
fn lookup(self: *Self, base: anytype, value: T, ctx: Context) ?Id {
pub fn lookup(self: *const Self, base: anytype, value: T) ?Id {
return self.lookupContext(base, value, self.context);
}
pub fn lookupContext(self: *const Self, base: anytype, value: T, ctx: Context) ?Id {
const table = self.table.ptr(base);
const items = self.items.ptr(base);
@@ -504,7 +507,7 @@ pub fn RefCountedSet(
/// is ignored and the existing item's ID is returned.
fn upsert(self: *Self, base: anytype, value: T, new_id: Id, ctx: Context) Id {
// If the item already exists, return it.
if (self.lookup(base, value, ctx)) |id| {
if (self.lookupContext(base, value, ctx)) |id| {
// Notify the context that the value is "deleted" because
// we're reusing the existing value in the set. This allows
// callers to clean up any resources associated with the value.
@@ -519,7 +522,7 @@ pub fn RefCountedSet(
/// Insert the given value into the hash table with the given ID.
/// asserts that the value is not already present in the table.
fn insert(self: *Self, base: anytype, value: T, new_id: Id, ctx: Context) Id {
assert(self.lookup(base, value, ctx) == null);
assert(self.lookupContext(base, value, ctx) == null);
const table = self.table.ptr(base);
const items = self.items.ptr(base);
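Finally, on the RefCountedSet change above: `lookup` is now a public wrapper that uses the set's own context, while `lookupContext` lets a caller hash with a different context (as `clonePartialRowFrom` does with the other page's context). A toy stand-in for that split; `ToySet` and `Context` are made up for illustration:

```zig
const std = @import("std");

const Context = struct {
    salt: u64,
    fn hash(self: Context, v: u64) u64 {
        return v ^ self.salt;
    }
};

const ToySet = struct {
    context: Context,
    stored: u64, // pretend this is the one hashed item we hold

    // Default-context lookup forwards to the explicit-context variant.
    fn lookup(self: *const ToySet, value: u64) ?u64 {
        return self.lookupContext(value, self.context);
    }

    fn lookupContext(self: *const ToySet, value: u64, ctx: Context) ?u64 {
        const h = ctx.hash(value);
        return if (h == self.stored) h else null;
    }
};

pub fn main() void {
    const set: ToySet = .{ .context = .{ .salt = 7 }, .stored = 42 ^ 7 };
    std.debug.print("default ctx: {any}\n", .{set.lookup(42)});
    std.debug.print("foreign ctx: {any}\n", .{set.lookupContext(42, .{ .salt = 9 })});
}
```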