Merge pull request #2167 from qwerasd205/insert-delete-lines-fix
Fix `insertLines` and `deleteLines`
@@ -1758,6 +1758,8 @@ pub const AdjustCapacity = struct {
     string_bytes: ?usize = null,
 };
 
+pub const AdjustCapacityError = Allocator.Error || Page.CloneFromError;
+
 /// Adjust the capcaity of the given page in the list. This should
 /// be used in cases where OutOfMemory is returned by some operation
 /// i.e to increase style counts, grapheme counts, etc.
@@ -1778,25 +1780,31 @@ pub fn adjustCapacity(
     self: *PageList,
     page: *List.Node,
     adjustment: AdjustCapacity,
-) !*List.Node {
+) AdjustCapacityError!*List.Node {
     // We always start with the base capacity of the existing page. This
     // ensures we never shrink from what we need.
     var cap = page.data.capacity;
 
+    // All ceilPowerOfTwo is unreachable because we're always same or less
+    // bit width so maxInt is always possible.
     if (adjustment.styles) |v| {
-        const aligned = try std.math.ceilPowerOfTwo(usize, v);
+        comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
+        const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
         cap.styles = @max(cap.styles, aligned);
     }
     if (adjustment.grapheme_bytes) |v| {
-        const aligned = try std.math.ceilPowerOfTwo(usize, v);
+        comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
+        const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
         cap.grapheme_bytes = @max(cap.grapheme_bytes, aligned);
     }
     if (adjustment.hyperlink_bytes) |v| {
-        const aligned = try std.math.ceilPowerOfTwo(usize, v);
+        comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
+        const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
        cap.hyperlink_bytes = @max(cap.hyperlink_bytes, aligned);
     }
     if (adjustment.string_bytes) |v| {
-        const aligned = try std.math.ceilPowerOfTwo(usize, v);
+        comptime assert(@bitSizeOf(@TypeOf(v)) <= @bitSizeOf(usize));
+        const aligned = std.math.ceilPowerOfTwo(usize, v) catch unreachable;
        cap.string_bytes = @max(cap.string_bytes, aligned);
     }
 
@@ -1830,7 +1838,7 @@ pub fn adjustCapacity(
 fn createPage(
     self: *PageList,
     cap: Capacity,
-) !*List.Node {
+) Allocator.Error!*List.Node {
     // log.debug("create page cap={}", .{cap});
     return try createPageExt(&self.pool, cap, &self.page_size);
 }
@@ -1839,7 +1847,7 @@ fn createPageExt(
     pool: *MemoryPool,
     cap: Capacity,
     total_size: ?*usize,
-) !*List.Node {
+) Allocator.Error!*List.Node {
     var page = try pool.nodes.create();
     errdefer pool.nodes.destroy(page);
 
@@ -2292,7 +2300,7 @@ pub fn pin(self: *const PageList, pt: point.Point) ?Pin {
 /// automatically updated as the pagelist is modified. If the point the
 /// pin points to is removed completely, the tracked pin will be updated
 /// to the top-left of the screen.
-pub fn trackPin(self: *PageList, p: Pin) !*Pin {
+pub fn trackPin(self: *PageList, p: Pin) Allocator.Error!*Pin {
     if (comptime std.debug.runtime_safety) assert(self.pinIsValid(p));
 
     // Create our tracked pin
@@ -433,11 +433,11 @@ pub fn clonePool(
 /// Adjust the capacity of a page within the pagelist of this screen.
 /// This handles some accounting if the page being modified is the
 /// cursor page.
-fn adjustCapacity(
+pub fn adjustCapacity(
     self: *Screen,
     page: *PageList.List.Node,
     adjustment: PageList.AdjustCapacity,
-) !*PageList.List.Node {
+) PageList.AdjustCapacityError!*PageList.List.Node {
     // If the page being modified isn't our cursor page then
     // this is a quick operation because we have no additional
     // accounting.
@@ -1792,7 +1792,7 @@ pub fn cursorSetHyperlink(self: *Screen) !void {
         return;
     } else |err| switch (err) {
         // hyperlink_map is out of space, realloc the page to be larger
-        error.OutOfMemory => {
+        error.HyperlinkMapOutOfMemory => {
             _ = try self.adjustCapacity(
                 self.cursor.page_pin.page,
                 .{ .hyperlink_bytes = page.capacity.hyperlink_bytes * 2 },
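This hunk is the simplest instance of the commit's grow-and-retry pattern: a granular error names the exhausted structure, the cursor's page is reallocated with that capacity doubled, and the operation is retried. A hedged sketch of that shape (the `op` callback is hypothetical; the capacity access mirrors the code above):

    // Sketch only: retry an operation that can fail with the granular
    // capacity errors introduced in this commit.
    while (true) {
        op() catch |err| switch (err) {
            error.HyperlinkMapOutOfMemory => {
                const page = self.cursor.page_pin.page;
                _ = try self.adjustCapacity(page, .{
                    .hyperlink_bytes = page.data.capacity.hyperlink_bytes * 2,
                });
                continue; // the page now has room; try again
            },
            else => return err,
        };
        break;
    }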
@@ -1404,6 +1404,11 @@ fn rowWillBeShifted(
     }
 }
 
+// TODO(qwerasd): `insertLines` and `deleteLines` are 99% identical,
+// the majority of their logic can (and should) be abstracted in to
+// a single shared helper function, probably on `Screen` not here.
+// I'm just too lazy to do that rn :p
+
 /// Insert amount lines at the current cursor row. The contents of the line
 /// at the current cursor row and below (to the bottom-most line in the
 /// scrolling region) are shifted down by amount lines. The contents of the
@@ -1435,6 +1440,19 @@ pub fn insertLines(self: *Terminal, count: usize) void {
     // Scrolling dirties the images because it updates their placements pins.
     self.screen.kitty_images.dirty = true;
 
+    // At the end we need to return the cursor to the row it started on.
+    const start_y = self.screen.cursor.y;
+    defer {
+        self.screen.cursorAbsolute(self.scrolling_region.left, start_y);
+
+        // Always unset pending wrap
+        self.screen.cursor.pending_wrap = false;
+    }
+
+    // We have a slower path if we have left or right scroll margins.
+    const left_right = self.scrolling_region.left > 0 or
+        self.scrolling_region.right < self.cols - 1;
+
     // Remaining rows from our cursor to the bottom of the scroll region.
     const rem = self.scrolling_region.bottom - self.screen.cursor.y + 1;
 
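Moving the cursor restore and the pending-wrap reset into a `defer` block means they now run on every return path out of `insertLines`, so none of the reworked logic below can skip them. A small standalone sketch of the `defer` semantics relied on here (not from the commit):

    const std = @import("std");

    fn demo() void {
        defer std.debug.print("second\n", .{}); // defers run at scope exit,
        defer std.debug.print("first\n", .{}); // in reverse order
        if (true) return; // both still execute on this early return
    }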
@@ -1442,121 +1460,159 @@ pub fn insertLines(self: *Terminal, count: usize) void {
     // region. So we take whichever is smaller.
     const adjusted_count = @min(count, rem);
 
-    // top is just the cursor position. insertLines starts at the cursor
-    // so this is our top. We want to shift lines down, down to the bottom
-    // of the scroll region.
-    const top = self.screen.cursor.page_pin.*;
-
-    // This is the amount of space at the bottom of the scroll region
-    // that will NOT be blank, so we need to shift the correct lines down.
-    // "scroll_amount" is the number of such lines.
-    const scroll_amount = rem - adjusted_count;
-    if (scroll_amount > 0) {
-        // If we have left/right scroll margins we have a slower path.
-        const left_right = self.scrolling_region.left > 0 or
-            self.scrolling_region.right < self.cols - 1;
-
-        const bot = top.down(scroll_amount - 1).?;
-        var it = bot.rowIterator(.left_up, top);
-        while (it.next()) |p| {
-            const dst_p = p.down(adjusted_count).?;
-            const src_rac = p.rowAndCell();
-            const dst_rac = dst_p.rowAndCell();
-            const src: *Row = src_rac.row;
-            const dst: *Row = dst_rac.row;
-
-            self.rowWillBeShifted(&p.page.data, src);
-            self.rowWillBeShifted(&dst_p.page.data, dst);
-
-            // Mark both our src/dst as dirty
-            p.markDirty();
-            dst_p.markDirty();
-
-            // If our scrolling region is full width, then we unset wrap.
-            if (!left_right) {
-                dst.wrap = false;
-                src.wrap = false;
-                dst.wrap_continuation = false;
-                src.wrap_continuation = false;
-            }
-
-            // If our page doesn't match, then we need to do a copy from
-            // one page to another. This is the slow path.
-            if (dst_p.page != p.page) {
-                dst_p.page.data.clonePartialRowFrom(
-                    &p.page.data,
-                    dst,
-                    src,
-                    self.scrolling_region.left,
-                    self.scrolling_region.right + 1,
-                ) catch |err| {
-                    std.log.warn("TODO: insertLines handle clone error err={}", .{err});
-                    @panic("TODO");
-                };
-
-                continue;
-            }
-
-            if (!left_right) {
-                // Swap the src/dst cells. This ensures that our dst gets the proper
-                // shifted rows and src gets non-garbage cell data that we can clear.
-                const dst_row = dst.*;
-                dst.* = src.*;
-                src.* = dst_row;
-
-                // Ensure what we did didn't corrupt the page
-                p.page.data.assertIntegrity();
-                continue;
-            }
-
-            // Left/right scroll margins we have to copy cells, which is much slower...
-            const page = &p.page.data;
-            page.moveCells(
-                src,
-                self.scrolling_region.left,
-                dst,
-                self.scrolling_region.left,
-                (self.scrolling_region.right - self.scrolling_region.left) + 1,
-            );
-        }
-
-        // The operations above can prune our cursor style so we need to
-        // update. This should never fail because the above can only FREE
-        // memory.
-        self.screen.manualStyleUpdate() catch |err| {
-            std.log.warn("deleteLines manualStyleUpdate err={}", .{err});
-            self.screen.cursor.style = .{};
-            self.screen.manualStyleUpdate() catch unreachable;
-        };
-    }
-
-    // Inserted lines should keep our bg color
-    const bot = top.down(adjusted_count - 1).?;
-    var it = top.rowIterator(.right_down, bot);
-    while (it.next()) |p| {
-        const row: *Row = p.rowAndCell().row;
-
-        // This row is now dirty
-        p.markDirty();
-
-        // Clear the src row.
-        const page = &p.page.data;
-        const cells = page.getCells(row);
-        const cells_write = cells[self.scrolling_region.left .. self.scrolling_region.right + 1];
-        self.screen.clearCells(page, row, cells_write);
-    }
-
-    // Move the cursor to the left margin. But importantly this also
-    // forces screen.cursor.page_cell to reload because the rows above
-    // shifted cell ofsets so this will ensure the cursor is pointing
-    // to the correct cell.
-    self.screen.cursorAbsolute(
-        self.scrolling_region.left,
-        self.screen.cursor.y,
-    );
-
-    // Always unset pending wrap
-    self.screen.cursor.pending_wrap = false;
+    // Create a new tracked pin which we'll use to navigate the page list
+    // so that if we need to adjust capacity it will be properly tracked.
+    var cur_p = self.screen.pages.trackPin(
+        self.screen.cursor.page_pin.down(rem - 1).?,
+    ) catch |err| {
+        comptime assert(@TypeOf(err) == error{OutOfMemory});
+
+        // This error scenario means that our GPA is OOM. This is not a
+        // situation we can gracefully handle. We can't just ignore insertLines
+        // because it'll result in a corrupted screen. Ideally in the future
+        // we flag the state as broken and show an error message to the user.
+        // For now, we panic.
+        log.err("insertLines trackPin error err={}", .{err});
+        @panic("insertLines trackPin OOM");
+    };
+    defer self.screen.pages.untrackPin(cur_p);
+
+    // Our current y position relative to the cursor
+    var y: usize = rem;
+
+    // Traverse from the bottom up
+    while (y > 0) {
+        const cur_rac = cur_p.rowAndCell();
+        const cur_row: *Row = cur_rac.row;
+
+        // Mark the row as dirty
+        cur_p.markDirty();
+
+        // If this is one of the lines we need to shift, do so
+        if (y > adjusted_count) {
+            const off_p = cur_p.up(adjusted_count).?;
+            const off_rac = off_p.rowAndCell();
+            const off_row: *Row = off_rac.row;
+
+            self.rowWillBeShifted(&cur_p.page.data, cur_row);
+            self.rowWillBeShifted(&off_p.page.data, off_row);
+
+            // If our scrolling region is full width, then we unset wrap.
+            if (!left_right) {
+                off_row.wrap = false;
+                cur_row.wrap = false;
+                off_row.wrap_continuation = false;
+                cur_row.wrap_continuation = false;
+            }
+
+            const src_p = off_p;
+            const src_row = off_row;
+            const dst_p = cur_p;
+            const dst_row = cur_row;
+
+            // If our page doesn't match, then we need to do a copy from
+            // one page to another. This is the slow path.
+            if (src_p.page != dst_p.page) {
+                dst_p.page.data.clonePartialRowFrom(
+                    &src_p.page.data,
+                    dst_row,
+                    src_row,
+                    self.scrolling_region.left,
+                    self.scrolling_region.right + 1,
+                ) catch |err| {
+                    const cap = dst_p.page.data.capacity;
+                    // Adjust our page capacity to make
+                    // room for we didn't have space for
+                    _ = self.screen.adjustCapacity(
+                        dst_p.page,
+                        switch (err) {
+                            // Rehash the sets
+                            error.StyleSetNeedsRehash,
+                            error.HyperlinkSetNeedsRehash,
+                            => .{},
+
+                            // Increase style memory
+                            error.StyleSetOutOfMemory,
+                            => .{ .styles = cap.styles * 2 },
+
+                            // Increase string memory
+                            error.StringAllocOutOfMemory,
+                            => .{ .string_bytes = cap.string_bytes * 2 },
+
+                            // Increase hyperlink memory
+                            error.HyperlinkSetOutOfMemory,
+                            error.HyperlinkMapOutOfMemory,
+                            => .{ .hyperlink_bytes = cap.hyperlink_bytes * 2 },
+
+                            // Increase grapheme memory
+                            error.GraphemeMapOutOfMemory,
+                            error.GraphemeAllocOutOfMemory,
+                            => .{ .grapheme_bytes = cap.grapheme_bytes * 2 },
+                        },
+                    ) catch |e| switch (e) {
+                        // This shouldn't be possible because above we're only
+                        // adjusting capacity _upwards_. So it should have all
+                        // the existing capacity it had to fit the adjusted
+                        // data. Panic since we don't expect this.
+                        error.StyleSetOutOfMemory,
+                        error.StyleSetNeedsRehash,
+                        error.StringAllocOutOfMemory,
+                        error.HyperlinkSetOutOfMemory,
+                        error.HyperlinkSetNeedsRehash,
+                        error.HyperlinkMapOutOfMemory,
+                        error.GraphemeMapOutOfMemory,
+                        error.GraphemeAllocOutOfMemory,
+                        => @panic("adjustCapacity resulted in capacity errors"),
+
+                        // The system allocator is OOM. We can't currently do
+                        // anything graceful here. We panic.
+                        error.OutOfMemory,
+                        => @panic("adjustCapacity system allocator OOM"),
+                    };
+
+                    // Continue the loop to try handling this row again.
+                    continue;
+                };
+            } else {
+                if (!left_right) {
+                    // Swap the src/dst cells. This ensures that our dst gets the
+                    // proper shifted rows and src gets non-garbage cell data that
+                    // we can clear.
+                    const dst = dst_row.*;
+                    dst_row.* = src_row.*;
+                    src_row.* = dst;
+
+                    // Ensure what we did didn't corrupt the page
+                    cur_p.page.data.assertIntegrity();
+                } else {
+                    // Left/right scroll margins we have to
+                    // copy cells, which is much slower...
+                    const page = &cur_p.page.data;
+                    page.moveCells(
+                        src_row,
+                        self.scrolling_region.left,
+                        dst_row,
+                        self.scrolling_region.left,
+                        (self.scrolling_region.right - self.scrolling_region.left) + 1,
+                    );
+                }
+            }
+        } else {
+            // Clear the cells for this row, it has been shifted.
+            const page = &cur_p.page.data;
+            const cells = page.getCells(cur_row);
+            self.screen.clearCells(
+                page,
+                cur_row,
+                cells[self.scrolling_region.left .. self.scrolling_region.right + 1],
+            );
+        }
+
+        // We have successfully processed a line.
+        y -= 1;
+        // Move our pin up to the next row.
+        if (cur_p.up(1)) |p| cur_p.* = p;
+    }
 }
 
 /// Removes amount lines from the current cursor row down. The remaining lines
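Both rewritten loops walk the scroll region with a tracked pin instead of a row iterator because `Screen.adjustCapacity` replaces the underlying page node; a tracked pin is updated by the `PageList` when that happens, while an iterator's pins would be left dangling. A condensed sketch of the traversal pattern used above (loop body elided, error handling simplified):

    // Sketch: the pin survives adjustCapacity calls made inside the loop.
    var cur_p = self.screen.pages.trackPin(self.screen.cursor.page_pin.*) catch
        @panic("trackPin OOM");
    defer self.screen.pages.untrackPin(cur_p);

    var y: usize = 0;
    while (y < rem) : (y += 1) {
        // ... row work that may call self.screen.adjustCapacity(cur_p.page, ...) ...
        if (cur_p.down(1)) |p| cur_p.* = p;
    }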
@@ -1575,9 +1631,9 @@ pub fn insertLines(self: *Terminal, count: usize) void {
 /// cleared space is colored according to the current SGR state.
 ///
 /// Moves the cursor to the left margin.
-pub fn deleteLines(self: *Terminal, count_req: usize) void {
+pub fn deleteLines(self: *Terminal, count: usize) void {
     // Rare, but happens
-    if (count_req == 0) return;
+    if (count == 0) return;
 
     // If the cursor is outside the scroll region we do nothing.
     if (self.screen.cursor.y < self.scrolling_region.top or
@@ -1588,125 +1644,168 @@ pub fn deleteLines(self: *Terminal, count_req: usize) void {
     // Scrolling dirties the images because it updates their placements pins.
     self.screen.kitty_images.dirty = true;
 
-    // top is just the cursor position. insertLines starts at the cursor
-    // so this is our top. We want to shift lines down, down to the bottom
-    // of the scroll region.
-    const top = self.screen.cursor.page_pin.*;
-
-    // Remaining rows from our cursor to the bottom of the scroll region.
-    const rem = self.scrolling_region.bottom - self.screen.cursor.y + 1;
-
-    // The maximum we can delete is the remaining lines in the scroll region.
-    const count = @min(count_req, rem);
-
-    // This is the amount of space at the bottom of the scroll region
-    // that will NOT be blank, so we need to shift the correct lines down.
-    // "scroll_amount" is the number of such lines.
-    const scroll_amount = rem - count;
-    if (scroll_amount > 0) {
-        // If we have left/right scroll margins we have a slower path.
-        const left_right = self.scrolling_region.left > 0 or
-            self.scrolling_region.right < self.cols - 1;
-
-        const bot = top.down(scroll_amount - 1).?;
-        var it = top.rowIterator(.right_down, bot);
-        while (it.next()) |p| {
-            const src_p = p.down(count).?;
-            const src_rac = src_p.rowAndCell();
-            const dst_rac = p.rowAndCell();
-            const src: *Row = src_rac.row;
-            const dst: *Row = dst_rac.row;
-
-            // Mark both our src/dst as dirty
-            p.markDirty();
-            src_p.markDirty();
-
-            self.rowWillBeShifted(&src_p.page.data, src);
-            self.rowWillBeShifted(&p.page.data, dst);
-
-            // If our scrolling region is full width, then we unset wrap.
-            if (!left_right) {
-                dst.wrap = false;
-                src.wrap = false;
-                dst.wrap_continuation = false;
-                src.wrap_continuation = false;
-            }
-
-            if (src_p.page != p.page) {
-                p.page.data.clonePartialRowFrom(
-                    &src_p.page.data,
-                    dst,
-                    src,
-                    self.scrolling_region.left,
-                    self.scrolling_region.right + 1,
-                ) catch |err| {
-                    std.log.warn("TODO: deleteLines handle clone error err={}", .{err});
-                    @panic("TODO");
-                };
-
-                continue;
-            }
-
-            if (!left_right) {
-                // Swap the src/dst cells. This ensures that our dst gets the proper
-                // shifted rows and src gets non-garbage cell data that we can clear.
-                const dst_row = dst.*;
-                dst.* = src.*;
-                src.* = dst_row;
-
-                // Ensure what we did didn't corrupt the page
-                p.page.data.assertIntegrity();
-                continue;
-            }
-
-            // Left/right scroll margins we have to copy cells, which is much slower...
-            const page = &p.page.data;
-            page.moveCells(
-                src,
-                self.scrolling_region.left,
-                dst,
-                self.scrolling_region.left,
-                (self.scrolling_region.right - self.scrolling_region.left) + 1,
-            );
-        }
-
-        // The operations above can prune our cursor style so we need to
-        // update. This should never fail because the above can only FREE
-        // memory.
-        self.screen.manualStyleUpdate() catch |err| {
-            std.log.warn("deleteLines manualStyleUpdate err={}", .{err});
-            self.screen.cursor.style = .{};
-            self.screen.manualStyleUpdate() catch unreachable;
-        };
-    }
-
-    const clear_top = top.down(scroll_amount).?;
-    const bot = top.down(rem - 1).?;
-    var it = clear_top.rowIterator(.right_down, bot);
-    while (it.next()) |p| {
-        const row: *Row = p.rowAndCell().row;
-
-        // This row is now dirty
-        p.markDirty();
-
-        // Clear the src row.
-        const page = &p.page.data;
-        const cells = page.getCells(row);
-        const cells_write = cells[self.scrolling_region.left .. self.scrolling_region.right + 1];
-        self.screen.clearCells(page, row, cells_write);
-    }
-
-    // Move the cursor to the left margin. But importantly this also
-    // forces screen.cursor.page_cell to reload because the rows above
-    // shifted cell ofsets so this will ensure the cursor is pointing
-    // to the correct cell.
-    self.screen.cursorAbsolute(
-        self.scrolling_region.left,
-        self.screen.cursor.y,
-    );
-
-    // Always unset pending wrap
-    self.screen.cursor.pending_wrap = false;
+    // At the end we need to return the cursor to the row it started on.
+    const start_y = self.screen.cursor.y;
+    defer {
+        self.screen.cursorAbsolute(self.scrolling_region.left, start_y);
+        // Always unset pending wrap
+        self.screen.cursor.pending_wrap = false;
+    }
+
+    // We have a slower path if we have left or right scroll margins.
+    const left_right = self.scrolling_region.left > 0 or
+        self.scrolling_region.right < self.cols - 1;
+
+    // Remaining rows from our cursor to the bottom of the scroll region.
+    const rem = self.scrolling_region.bottom - self.screen.cursor.y + 1;
+
+    // We can only insert lines up to our remaining lines in the scroll
+    // region. So we take whichever is smaller.
+    const adjusted_count = @min(count, rem);
+
+    // Create a new tracked pin which we'll use to navigate the page list
+    // so that if we need to adjust capacity it will be properly tracked.
+    var cur_p = self.screen.pages.trackPin(
+        self.screen.cursor.page_pin.*,
+    ) catch |err| {
+        // See insertLines
+        comptime assert(@TypeOf(err) == error{OutOfMemory});
+        log.err("deleteLines trackPin error err={}", .{err});
+        @panic("deleteLines trackPin OOM");
+    };
+    defer self.screen.pages.untrackPin(cur_p);
+
+    // Our current y position relative to the cursor
+    var y: usize = 0;
+
+    // Traverse from the top down
+    while (y < rem) {
+        const cur_rac = cur_p.rowAndCell();
+        const cur_row: *Row = cur_rac.row;
+
+        // Mark the row as dirty
+        cur_p.markDirty();
+
+        // If this is one of the lines we need to shift, do so
+        if (y < rem - adjusted_count) {
+            const off_p = cur_p.down(adjusted_count).?;
+            const off_rac = off_p.rowAndCell();
+            const off_row: *Row = off_rac.row;
+
+            self.rowWillBeShifted(&cur_p.page.data, cur_row);
+            self.rowWillBeShifted(&off_p.page.data, off_row);
+
+            // If our scrolling region is full width, then we unset wrap.
+            if (!left_right) {
+                off_row.wrap = false;
+                cur_row.wrap = false;
+                off_row.wrap_continuation = false;
+                cur_row.wrap_continuation = false;
+            }
+
+            const src_p = off_p;
+            const src_row = off_row;
+            const dst_p = cur_p;
+            const dst_row = cur_row;
+
+            // If our page doesn't match, then we need to do a copy from
+            // one page to another. This is the slow path.
+            if (src_p.page != dst_p.page) {
+                dst_p.page.data.clonePartialRowFrom(
+                    &src_p.page.data,
+                    dst_row,
+                    src_row,
+                    self.scrolling_region.left,
+                    self.scrolling_region.right + 1,
+                ) catch |err| {
+                    const cap = dst_p.page.data.capacity;
+                    // Adjust our page capacity to make
+                    // room for we didn't have space for
+                    _ = self.screen.adjustCapacity(
+                        dst_p.page,
+                        switch (err) {
+                            // Rehash the sets
+                            error.StyleSetNeedsRehash,
+                            error.HyperlinkSetNeedsRehash,
+                            => .{},
+
+                            // Increase style memory
+                            error.StyleSetOutOfMemory,
+                            => .{ .styles = cap.styles * 2 },
+
+                            // Increase string memory
+                            error.StringAllocOutOfMemory,
+                            => .{ .string_bytes = cap.string_bytes * 2 },
+
+                            // Increase hyperlink memory
+                            error.HyperlinkSetOutOfMemory,
+                            error.HyperlinkMapOutOfMemory,
+                            => .{ .hyperlink_bytes = cap.hyperlink_bytes * 2 },
+
+                            // Increase grapheme memory
+                            error.GraphemeMapOutOfMemory,
+                            error.GraphemeAllocOutOfMemory,
+                            => .{ .grapheme_bytes = cap.grapheme_bytes * 2 },
+                        },
+                    ) catch |e| switch (e) {
+                        // See insertLines which has the same error capture.
+                        error.StyleSetOutOfMemory,
+                        error.StyleSetNeedsRehash,
+                        error.StringAllocOutOfMemory,
+                        error.HyperlinkSetOutOfMemory,
+                        error.HyperlinkSetNeedsRehash,
+                        error.HyperlinkMapOutOfMemory,
+                        error.GraphemeMapOutOfMemory,
+                        error.GraphemeAllocOutOfMemory,
+                        => @panic("adjustCapacity resulted in capacity errors"),
+
+                        error.OutOfMemory,
+                        => @panic("adjustCapacity system allocator OOM"),
+                    };
+
+                    // Continue the loop to try handling this row again.
+                    continue;
+                };
+            } else {
+                if (!left_right) {
+                    // Swap the src/dst cells. This ensures that our dst gets the
+                    // proper shifted rows and src gets non-garbage cell data that
+                    // we can clear.
+                    const dst = dst_row.*;
+                    dst_row.* = src_row.*;
+                    src_row.* = dst;
+
+                    // Ensure what we did didn't corrupt the page
+                    cur_p.page.data.assertIntegrity();
+                } else {
+                    // Left/right scroll margins we have to
+                    // copy cells, which is much slower...
+                    const page = &cur_p.page.data;
+                    page.moveCells(
+                        src_row,
+                        self.scrolling_region.left,
+                        dst_row,
+                        self.scrolling_region.left,
+                        (self.scrolling_region.right - self.scrolling_region.left) + 1,
+                    );
+                }
+            }
+        } else {
+            // Clear the cells for this row, it's from out of bounds.
+            const page = &cur_p.page.data;
+            const cells = page.getCells(cur_row);
+            self.screen.clearCells(
+                page,
+                cur_row,
+                cells[self.scrolling_region.left .. self.scrolling_region.right + 1],
+            );
+        }
+
+        // We have successfully processed a line.
+        y += 1;
+        // Move our pin down to the next row.
+        if (cur_p.down(1)) |p| cur_p.* = p;
+    }
 }
 
 /// Inserts spaces at current cursor position moving existing cell contents
@@ -611,7 +611,27 @@ pub const Page = struct {
         return result;
     }
 
-    pub const CloneFromError = Allocator.Error || style.Set.AddError;
+    pub const StyleSetError = error{
+        StyleSetOutOfMemory,
+        StyleSetNeedsRehash,
+    };
+
+    pub const HyperlinkError = error{
+        StringAllocOutOfMemory,
+        HyperlinkSetOutOfMemory,
+        HyperlinkSetNeedsRehash,
+        HyperlinkMapOutOfMemory,
+    };
+
+    pub const GraphemeError = error{
+        GraphemeMapOutOfMemory,
+        GraphemeAllocOutOfMemory,
+    };
+
+    pub const CloneFromError =
+        StyleSetError ||
+        HyperlinkError ||
+        GraphemeError;
 
     /// Clone the contents of another page into this page. The capacities
     /// can be different, but the size of the other page must fit into
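Replacing the old `Allocator.Error || style.Set.AddError` with named, per-resource errors is what allows the `Terminal` call sites above to translate each failure into a specific capacity bump. A condensed sketch of that mapping (the helper name is hypothetical; the arms mirror the switch used in `insertLines`/`deleteLines`):

    fn adjustmentFor(err: Page.CloneFromError, cap: Capacity) PageList.AdjustCapacity {
        return switch (err) {
            // Rehashing needs no extra capacity.
            error.StyleSetNeedsRehash, error.HyperlinkSetNeedsRehash => .{},
            error.StyleSetOutOfMemory => .{ .styles = cap.styles * 2 },
            error.StringAllocOutOfMemory => .{ .string_bytes = cap.string_bytes * 2 },
            error.HyperlinkSetOutOfMemory,
            error.HyperlinkMapOutOfMemory,
            => .{ .hyperlink_bytes = cap.hyperlink_bytes * 2 },
            error.GraphemeMapOutOfMemory,
            error.GraphemeAllocOutOfMemory,
            => .{ .grapheme_bytes = cap.grapheme_bytes * 2 },
        };
    }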
@@ -688,7 +708,7 @@ pub const Page = struct {
         const cells = dst_row.cells.ptr(self.memory)[x_start..x_end];
 
         // If our destination has styles or graphemes then we need to
-        // clear some state.
+        // clear some state. This will free up the managed memory as well.
         if (dst_row.managedMemory()) self.clearCells(dst_row, x_start, x_end);
 
         // Copy all the row metadata but keep our cells offset
@@ -731,6 +751,16 @@ pub const Page = struct {
         // get all of that right.
         for (cells, other_cells) |*dst_cell, *src_cell| {
             dst_cell.* = src_cell.*;
+
+            // Reset any managed memory markers on the cell so that we don't
+            // hit an integrity check if we have to return an error because
+            // the page can't fit the new memory.
+            dst_cell.hyperlink = false;
+            dst_cell.style_id = style.default_id;
+            if (dst_cell.content_tag == .codepoint_grapheme) {
+                dst_cell.content_tag = .codepoint;
+            }
+
             if (src_cell.hasGrapheme()) {
                 // To prevent integrity checks flipping. This will
                 // get fixed up when we check the style id below.
@@ -738,13 +768,14 @@ pub const Page = struct {
                     dst_cell.style_id = style.default_id;
                 }
 
-                dst_cell.content_tag = .codepoint; // required for appendGrapheme
+                // Copy the grapheme codepoints
                 const cps = other.lookupGrapheme(src_cell).?;
-                for (cps) |cp| try self.appendGrapheme(dst_row, dst_cell, cp);
+                // Safe to use setGraphemes because we cleared all
+                // managed memory for our destination cell range.
+                try self.setGraphemes(dst_row, dst_cell, cps);
             }
             if (src_cell.hyperlink) hyperlink: {
-                dst_row.hyperlink = true;
 
                 const id = other.lookupHyperlink(src_cell).?;
 
                 // Fast-path: same page we can add with the same id.
@@ -757,55 +788,61 @@ pub const Page = struct {
                 // Slow-path: get the hyperlink from the other page,
                 // add it, and migrate.
 
-                const dst_link = dst_link: {
-                    // Fast path is we just dupe the hyperlink because
-                    // it doesn't require traversal through the hyperlink
-                    // map. If the add below already contains it then it'll
-                    // call the deleted context callback and we'll free
-                    // this back.
-                    const other_link = other.hyperlink_set.get(other.memory, id);
-                    if (other_link.dupe(other, self)) |dst_link| {
-                        break :dst_link dst_link;
-                    } else |err| switch (err) {
-                        // If this happens, the only possible valid outcome is
-                        // that it is because this link is already in our set
-                        // and our memory is full because we already have it.
-                        // Any other outcome is an integrity violation.
-                        error.OutOfMemory => {},
-                    }
-
-                    // Slow, the only way to really find our link is to
-                    // traverse over the map, which includes dupes...
-                    const dst_map = self.hyperlink_map.map(self.memory);
-                    var it = dst_map.valueIterator();
-                    while (it.next()) |existing_id| {
-                        const existing_link = self.hyperlink_set.get(
-                            self.memory,
-                            existing_id.*,
-                        );
-
-                        if (existing_link.eql(
-                            self.memory,
-                            other_link,
-                            other.memory,
-                        )) {
-                            break :dst_link existing_link.*;
-                        }
-                    }
-
-                    // There is no other valid scenario where we don't
-                    // have the memory to dupe a hyperlink since we allocate
-                    // cloned pages with enough capacity to contain their
-                    // contents.
-                    unreachable;
-                };
-
-                const dst_id = try self.hyperlink_set.addWithIdContext(
-                    self.memory,
-                    dst_link,
-                    id,
-                    .{ .page = self },
-                ) orelse id;
+                // If our page can't support an additional cell with
+                // a hyperlink then we have to return an error.
+                if (self.hyperlinkCount() >= self.hyperlinkCapacity() - 1) {
+                    // The hyperlink map capacity needs to be increased.
+                    return error.HyperlinkMapOutOfMemory;
+                }
+
+                const other_link = other.hyperlink_set.get(other.memory, id);
+                const dst_id = dst_id: {
+                    // First check if the link already exists in our page,
+                    // and increment its refcount if so, since we're about
+                    // to use it.
+                    if (self.hyperlink_set.lookupContext(
+                        self.memory,
+                        other_link.*,
+
+                        // `lookupContext` uses the context for hashing, and
+                        // that doesn't write to the page, so this constCast
+                        // is completely safe.
+                        .{ .page = @constCast(other) },
+                    )) |i| {
+                        self.hyperlink_set.use(self.memory, i);
+                        break :dst_id i;
+                    }
+
+                    // If we don't have this link in our page yet then
+                    // we need to clone it over and add it to our set.
+
+                    // Clone the link.
+                    const dst_link = other_link.dupe(other, self) catch |e| {
+                        comptime assert(@TypeOf(e) == error{OutOfMemory});
+                        // The string alloc capacity needs to be increased.
+                        return error.StringAllocOutOfMemory;
+                    };
+
+                    // Add it, preferring to use the same ID as the other
+                    // page, since this *probably* speeds up full-page
+                    // clones.
+                    //
+                    // TODO(qwerasd): verify the assumption that `addWithId`
+                    // is ever actually useful, I think it may not be.
+                    break :dst_id self.hyperlink_set.addWithIdContext(
+                        self.memory,
+                        dst_link,
+                        id,
+                        .{ .page = self },
+                    ) catch |e| switch (e) {
+                        // The hyperlink set capacity needs to be increased.
+                        error.OutOfMemory => return error.HyperlinkSetOutOfMemory,
+
+                        // The hyperlink set needs to be rehashed.
+                        error.NeedsRehash => return error.HyperlinkSetNeedsRehash,
+                    } orelse id;
+                };
+
                 try self.setHyperlink(dst_row, dst_cell, dst_id);
             }
             if (src_cell.style_id != style.default_id) style: {
@@ -822,11 +859,17 @@ pub const Page = struct {
                 // Slow path: Get the style from the other
                 // page and add it to this page's style set.
                 const other_style = other.styles.get(other.memory, src_cell.style_id);
-                dst_cell.style_id = try self.styles.addWithId(
+                dst_cell.style_id = self.styles.addWithId(
                     self.memory,
                     other_style.*,
                     src_cell.style_id,
-                ) orelse src_cell.style_id;
+                ) catch |e| switch (e) {
+                    // The style set capacity needs to be increased.
+                    error.OutOfMemory => return error.StyleSetOutOfMemory,
+
+                    // The style set needs to be rehashed.
+                    error.NeedsRehash => return error.StyleSetNeedsRehash,
+                } orelse src_cell.style_id;
             }
             if (src_cell.codepoint() == kitty.graphics.unicode.placeholder) {
                 dst_row.kitty_virtual_placeholder = true;
@@ -1090,12 +1133,16 @@ pub const Page = struct {
    /// Caller is responsible for updating the refcount in the hyperlink
    /// set as necessary by calling `use` if the id was not acquired with
    /// `add`.
-    pub fn setHyperlink(self: *Page, row: *Row, cell: *Cell, id: hyperlink.Id) !void {
+    pub fn setHyperlink(self: *Page, row: *Row, cell: *Cell, id: hyperlink.Id) error{HyperlinkMapOutOfMemory}!void {
         defer self.assertIntegrity();
 
         const cell_offset = getOffset(Cell, self.memory, cell);
         var map = self.hyperlink_map.map(self.memory);
-        const gop = try map.getOrPut(cell_offset);
+        const gop = map.getOrPut(cell_offset) catch |e| {
+            comptime assert(@TypeOf(e) == error{OutOfMemory});
+            // The hyperlink map capacity needs to be increased.
+            return error.HyperlinkMapOutOfMemory;
+        };
 
         if (gop.found_existing) {
             // Always release the old hyperlink, because even if it's actually
@@ -1160,7 +1207,7 @@ pub const Page = struct {
 
     /// Set the graphemes for the given cell. This asserts that the cell
     /// has no graphemes set, and only contains a single codepoint.
-    pub fn setGraphemes(self: *Page, row: *Row, cell: *Cell, cps: []u21) Allocator.Error!void {
+    pub fn setGraphemes(self: *Page, row: *Row, cell: *Cell, cps: []u21) GraphemeError!void {
         defer self.assertIntegrity();
 
         assert(cell.codepoint() > 0);
@@ -1169,14 +1216,22 @@ pub const Page = struct {
         const cell_offset = getOffset(Cell, self.memory, cell);
         var map = self.grapheme_map.map(self.memory);
 
-        const slice = try self.grapheme_alloc.alloc(u21, self.memory, cps.len);
+        const slice = self.grapheme_alloc.alloc(u21, self.memory, cps.len) catch |e| {
+            comptime assert(@TypeOf(e) == error{OutOfMemory});
+            // The grapheme alloc capacity needs to be increased.
+            return error.GraphemeAllocOutOfMemory;
+        };
         errdefer self.grapheme_alloc.free(self.memory, slice);
         @memcpy(slice, cps);
 
-        try map.putNoClobber(cell_offset, .{
+        map.putNoClobber(cell_offset, .{
             .offset = getOffset(u21, self.memory, @ptrCast(slice.ptr)),
             .len = slice.len,
-        });
+        }) catch |e| {
+            comptime assert(@TypeOf(e) == error{OutOfMemory});
+            // The grapheme map capacity needs to be increased.
+            return error.GraphemeMapOutOfMemory;
+        };
         errdefer map.remove(cell_offset);
 
         cell.content_tag = .codepoint_grapheme;
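Note how the new map-insert error path composes with the existing `errdefer` above it: if `putNoClobber` fails and we return `error.GraphemeMapOutOfMemory`, the earlier `errdefer self.grapheme_alloc.free(...)` still fires, so the codepoint slice allocated a few lines up is not leaked. A standalone sketch of that `errdefer` behavior (not from the commit):

    const std = @import("std");

    fn demo(alloc: std.mem.Allocator, fail: bool) !void {
        const buf = try alloc.alloc(u8, 16);
        errdefer alloc.free(buf); // runs only if an error is returned below
        if (fail) return error.MapOutOfMemory; // buf is freed here
        alloc.free(buf); // success path handles the buffer itself
    }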
@@ -227,7 +227,7 @@ pub fn RefCountedSet(
             }
 
             // If the item already exists, return it.
-            if (self.lookup(base, value, ctx)) |id| {
+            if (self.lookupContext(base, value, ctx)) |id| {
                 // Notify the context that the value is "deleted" because
                 // we're reusing the existing value in the set. This allows
                 // callers to clean up any resources associated with the value.
@@ -453,7 +453,10 @@ pub fn RefCountedSet(
 
         /// Find an item in the table and return its ID.
         /// If the item does not exist in the table, null is returned.
-        fn lookup(self: *Self, base: anytype, value: T, ctx: Context) ?Id {
+        pub fn lookup(self: *const Self, base: anytype, value: T) ?Id {
+            return self.lookupContext(base, value, self.context);
+        }
+        pub fn lookupContext(self: *const Self, base: anytype, value: T, ctx: Context) ?Id {
             const table = self.table.ptr(base);
             const items = self.items.ptr(base);
 
@@ -504,7 +507,7 @@ pub fn RefCountedSet(
         /// is ignored and the existing item's ID is returned.
         fn upsert(self: *Self, base: anytype, value: T, new_id: Id, ctx: Context) Id {
             // If the item already exists, return it.
-            if (self.lookup(base, value, ctx)) |id| {
+            if (self.lookupContext(base, value, ctx)) |id| {
                 // Notify the context that the value is "deleted" because
                 // we're reusing the existing value in the set. This allows
                 // callers to clean up any resources associated with the value.
@@ -519,7 +522,7 @@ pub fn RefCountedSet(
         /// Insert the given value into the hash table with the given ID.
         /// asserts that the value is not already present in the table.
         fn insert(self: *Self, base: anytype, value: T, new_id: Id, ctx: Context) Id {
-            assert(self.lookup(base, value, ctx) == null);
+            assert(self.lookupContext(base, value, ctx) == null);
 
             const table = self.table.ptr(base);
             const items = self.items.ptr(base);