Merge branch 'ghostty-org:main' into alt-keybindings-copy-and-paste
@@ -1,7 +1,7 @@
<!-- LOGO -->
<h1>
<p align="center">
<img src="https://user-images.githubusercontent.com/1299/199110421-9ff5fc30-a244-441e-9882-26070662adf9.png" alt="Logo" width="100">
<img src="https://github.com/user-attachments/assets/fe853809-ba8b-400b-83ab-a9a0da25be8a" alt="Logo" width="128">
<br>Ghostty
</h1>
<p align="center">
build.zig
@@ -578,15 +578,15 @@ pub fn build(b: *std.Build) !void {
// Various icons that our application can use, including the icon
// that will be used for the desktop.
b.installFile("images/icons/icon_16x16.png", "share/icons/hicolor/16x16/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_32x32.png", "share/icons/hicolor/32x32/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_128x128.png", "share/icons/hicolor/128x128/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_256x256.png", "share/icons/hicolor/256x256/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_512x512.png", "share/icons/hicolor/512x512/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_16x16@2x@2x.png", "share/icons/hicolor/16x16@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_32x32@2x@2x.png", "share/icons/hicolor/32x32@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_128x128@2x@2x.png", "share/icons/hicolor/128x128@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_256x256@2x@2x.png", "share/icons/hicolor/256x256@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_16.png", "share/icons/hicolor/16x16/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_32.png", "share/icons/hicolor/32x32/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_128.png", "share/icons/hicolor/128x128/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_256.png", "share/icons/hicolor/256x256/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_512.png", "share/icons/hicolor/512x512/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_16@2x.png", "share/icons/hicolor/16x16@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_32@2x.png", "share/icons/hicolor/32x32@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_128@2x.png", "share/icons/hicolor/128x128@2/apps/com.mitchellh.ghostty.png");
b.installFile("images/icons/icon_256@2x.png", "share/icons/hicolor/256x256@2/apps/com.mitchellh.ghostty.png");
}

// libghostty (non-Darwin)
@@ -5,8 +5,8 @@
.dependencies = .{
// Zig libs
.libxev = .{
.url = "https://github.com/mitchellh/libxev/archive/b8d1d93e5c899b27abbaa7df23b496c3e6a178c7.tar.gz",
.hash = "1220612bc023c21d75234882ec9a8c6a1cbd9d642da3dfb899297f14bb5bd7b6cd78",
.url = "https://github.com/mitchellh/libxev/archive/db6a52bafadf00360e675fefa7926e8e6c0e9931.tar.gz",
.hash = "12206029de146b685739f69b10a6f08baee86b3d0a5f9a659fa2b2b66c9602078bbf",
},
.mach_glfw = .{
.url = "https://github.com/mitchellh/mach-glfw/archive/37c2995f31abcf7e8378fba68ddcf4a3faa02de0.tar.gz",
BIN dist/macos/Ghostty.icns (vendored; Executable file → Normal file)
BIN dist/windows/ghostty.ico (vendored; 71 KiB → 84 KiB)
flake.lock (generated)
@@ -20,27 +20,27 @@
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1726062281,
"narHash": "sha256-PyFVySdGj3enKqm8RQuo4v1KLJLmNLOq2yYOHsI6e2Q=",
"lastModified": 1733423277,
"narHash": "sha256-TxabjxEgkNbCGFRHgM/b9yZWlBj60gUOUnRT/wbVQR8=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "e65aa8301ba4f0ab8cb98f944c14aa9da07394f8",
"rev": "e36963a147267afc055f7cf65225958633e536bf",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "release-24.05",
"ref": "release-24.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1719082008,
"narHash": "sha256-jHJSUH619zBQ6WdC21fFAlDxHErKVDJ5fpN0Hgx4sjs=",
"lastModified": 1733229606,
"narHash": "sha256-FLYY5M0rpa5C2QAE3CKLYAM6TwbKicdRK6qNrSHlNrE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "9693852a2070b398ee123a329e68f0dab5526681",
"rev": "566e53c2ad750c84f6d31f9ccb9d00f823165550",
"type": "github"
},
"original": {
@@ -7,7 +7,7 @@
# We want to stay as up to date as possible but need to be careful that the
# glibc versions used by our dependencies from Nix are compatible with the
# system glibc that the user is building for.
nixpkgs-stable.url = "github:nixos/nixpkgs/release-24.05";
nixpkgs-stable.url = "github:nixos/nixpkgs/release-24.11";

zig = {
url = "github:mitchellh/zig-overlay";
@@ -36,7 +36,6 @@

packages.${system} = let
mkArgs = optimize: {
inherit (pkgs-unstable) zig_0_13 stdenv;
inherit optimize;

revision = self.shortRev or self.dirtyShortRev or "dirty";
BIN images/icons/icon_1024.png (Normal file, 454 KiB)
BIN images/icons/icon_128.png (Normal file, 15 KiB)
BIN images/icons/icon_128@2x.png (Normal file, 67 KiB)
BIN images/icons/icon_16.png (Normal file, 666 B)
BIN images/icons/icon_16@2x.png (Normal file, 1.5 KiB)
BIN images/icons/icon_256.png (Normal file, 67 KiB)
BIN images/icons/icon_256@2x.png (Normal file, 216 KiB)
BIN images/icons/icon_32.png (Normal file, 1.5 KiB)
BIN images/icons/icon_32@2x.png (Normal file, 4.4 KiB)
BIN images/icons/icon_512.png (Normal file, 216 KiB)
@@ -1,67 +1,67 @@
{
"images" : [
{
"filename" : "icon_512x512@2x@2x 1.png",
"filename" : "macOS-AppIcon-1024px.png",
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"filename" : "icon_16x16.png",
"filename" : "macOS-AppIcon-16px-16pt@1x.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"filename" : "icon_16x16@2x@2x.png",
"filename" : "macOS-AppIcon-32px-16pt@2x.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "16x16"
},
{
"filename" : "icon_32x32.png",
"filename" : "macOS-AppIcon-32px-32pt@1x.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"filename" : "icon_32x32@2x@2x.png",
"filename" : "macOS-AppIcon-64px-32pt@2x.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "32x32"
},
{
"filename" : "icon_128x128.png",
"filename" : "macOS-AppIcon-128px-128pt@1x.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"filename" : "icon_128x128@2x@2x.png",
"filename" : "macOS-AppIcon-256px-128pt@2x.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "128x128"
},
{
"filename" : "icon_256x256.png",
"filename" : "macOS-AppIcon-256px-128pt@2x 1.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"filename" : "icon_256x256@2x@2x.png",
"filename" : "macOS-AppIcon-512px-256pt@2x.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "256x256"
},
{
"filename" : "icon_512x512.png",
"filename" : "macOS-AppIcon-512px.png",
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
},
{
"filename" : "icon_512x512@2x@2x.png",
"filename" : "macOS-AppIcon-1024px 1.png",
"idiom" : "mac",
"scale" : "2x",
"size" : "512x512"
BIN macos/Assets.xcassets/AppIcon.appiconset/macOS-AppIcon-512px.png (Normal file, 216 KiB)
@@ -1,17 +1,17 @@
{
"images" : [
{
"filename" : "icon_128x128.png",
"filename" : "macOS-AppIcon-256px-128pt@2x.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"filename" : "icon_128x128@2x@2x.png",
"filename" : "macOS-AppIcon-512px.png",
"idiom" : "universal",
"scale" : "2x"
},
{
"filename" : "icon_256x256@2x@2x.png",
"filename" : "macOS-AppIcon-1024px.png",
"idiom" : "universal",
"scale" : "3x"
}
BIN macos/Assets.xcassets/AppIconImage.imageset/macOS-AppIcon-1024px.png (vendored, Normal file, 454 KiB)
BIN macos/Assets.xcassets/AppIconImage.imageset/macOS-AppIcon-256px-128pt@2x.png (vendored, Normal file, 67 KiB)
BIN macos/Assets.xcassets/AppIconImage.imageset/macOS-AppIcon-512px.png (vendored, Normal file, 216 KiB)
@@ -429,12 +429,34 @@ extension Ghostty {
/// will lose focus. There has to be some nice SwiftUI-native way to fix this but I can't
/// figure it out so we're going to do this hacky thing to bring focus back to the terminal
/// that should have it.
static func moveFocus(to: SurfaceView, from: SurfaceView? = nil) {
DispatchQueue.main.async {
static func moveFocus(
to: SurfaceView,
from: SurfaceView? = nil,
delay: TimeInterval? = nil
) {
// The whole delay machinery is a bit of a hack to work around a
// situation where the window is destroyed and the surface view
// will never be attached to a window. Realistically, we should
// handle this upstream but we also don't want this function to be
// a source of infinite loops.

// Our max delay before we give up
let maxDelay: TimeInterval = 0.5
guard (delay ?? 0) < maxDelay else { return }

// We start at a 50 millisecond delay and do a doubling backoff
let nextDelay: TimeInterval = if let delay {
delay * 2
} else {
// 50 milliseconds
0.05
}

let work: DispatchWorkItem = .init {
// If the callback runs before the surface is attached to a view
// then the window will be nil. We just reschedule in that case.
guard let window = to.window else {
moveFocus(to: to, from: from)
moveFocus(to: to, from: from, delay: nextDelay)
return
}

@@ -448,5 +470,12 @@ extension Ghostty {

window.makeFirstResponder(to)
}

let queue = DispatchQueue.main
if let delay {
queue.asyncAfter(deadline: .now() + delay, execute: work)
} else {
queue.async(execute: work)
}
}
}
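
The retry schedule above is easier to see as numbers: the first reschedule waits 50 ms, each later one doubles the wait, and the chain stops once the proposed delay reaches the 0.5 s cap. A minimal Zig sketch of the same policy follows; nextRetryDelay is a hypothetical helper, not part of this diff.

    const std = @import("std");

    /// Returns the next delay (in seconds) to wait before retrying,
    /// or null to give up. Mirrors the guard above: once the delay
    /// reaches the 0.5s cap, stop retrying.
    fn nextRetryDelay(prev: ?f64) ?f64 {
        const max_delay: f64 = 0.5;
        const next: f64 = if (prev) |d| d * 2 else 0.05;
        return if (next < max_delay) next else null;
    }

    test "focus retry backoff: 50ms, 100ms, 200ms, 400ms, then give up" {
        var seen = std.ArrayList(f64).init(std.testing.allocator);
        defer seen.deinit();
        var d: ?f64 = null;
        while (nextRetryDelay(d)) |n| : (d = n) try seen.append(n);
        try std.testing.expectEqualSlices(f64, &.{ 0.05, 0.1, 0.2, 0.4 }, seen.items);
    }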
@@ -198,6 +198,10 @@ class NonNativeFullscreen: FullscreenBase, FullscreenStyle {
// Being untitled lets our content take up the full frame.
window.styleMask.remove(.titled)

// We don't want the non-native fullscreen window to be resizable
// from the edges.
window.styleMask.remove(.resizable)

// Focus window
window.makeKeyAndOrderFront(nil)
@@ -159,11 +159,20 @@ in
# it to be "portable" across the system.
LD_LIBRARY_PATH = lib.makeLibraryPath rpathLibs;

# On Linux we need to set up the environment so that all GTK data
# is available (namely icons).
shellHook = lib.optionalString stdenv.hostPlatform.isLinux ''
# Minimal subset of env set by wrapGAppsHook4 for icons and global settings
export XDG_DATA_DIRS=$XDG_DATA_DIRS:${hicolor-icon-theme}/share:${gnome.adwaita-icon-theme}/share
export XDG_DATA_DIRS=$XDG_DATA_DIRS:$GSETTINGS_SCHEMAS_PATH # from glib setup hook
'';
shellHook =
(lib.optionalString stdenv.hostPlatform.isLinux ''
# On Linux we need to set up the environment so that all GTK data
# is available (namely icons).

# Minimal subset of env set by wrapGAppsHook4 for icons and global settings
export XDG_DATA_DIRS=$XDG_DATA_DIRS:${hicolor-icon-theme}/share:${gnome.adwaita-icon-theme}/share
export XDG_DATA_DIRS=$XDG_DATA_DIRS:$GSETTINGS_SCHEMAS_PATH # from glib setup hook
'')
+ (lib.optionalString stdenv.hostPlatform.isDarwin ''
# On macOS, we unset the macOS SDK env vars that Nix sets up because
# we rely on a system installation. Nix only provides a macOS SDK
# and we need iOS too.
unset SDKROOT
unset DEVELOPER_DIR
'');
}
@@ -1,3 +1,3 @@
# This file is auto-generated! check build-support/check-zig-cache-hash.sh for
# more details.
"sha256-D1SQIlmdP9x1PDgRVOy1qJGmu9osDbuyxGOcFj646N4="
"sha256-c3MQJG7vwQBOaxHQ8cYP0HxdsLqlgsVmAiT1d7gq6js="
@@ -1182,6 +1182,14 @@ pub fn updateConfig(
log.warn("failed to notify renderer of config change err={}", .{err});
};

// If we have a title set then we update our window to have the
// newly configured title.
if (config.title) |title| try self.rt_app.performAction(
.{ .surface = self },
.set_title,
.{ .title = title },
);

// Notify the window
try self.rt_app.performAction(
.{ .surface = self },
@@ -115,8 +115,25 @@ pub fn init(core_app: *CoreApp, opts: Options) !App {
// reassess...
//
// Upstream issue: https://gitlab.gnome.org/GNOME/gtk/-/issues/6589
//
// Specific details about values:
// - "opengl" - output OpenGL debug information
// - "gl-disable-gles" - disable GLES, Ghostty can't use GLES
// - "vulkan-disable" - disable Vulkan, Ghostty can't use Vulkan
// and initializing a Vulkan context was causing a longer delay
// on some systems.
_ = internal_os.setenv("GDK_DEBUG", "opengl,gl-disable-gles,vulkan-disable");

// Wayland-EGL on GTK 4.14 causes "Failed to create EGL context" errors.
// This can be fixed by forcing the backend to prefer X11. This issue
// appears to be fixed in GTK 4.16 but I wasn't able to bisect why.
// The "*" at the end says that if X11 fails, try all remaining
// backends.
_ = internal_os.setenv("GDK_BACKEND", "x11,*");
} else {
// Versions prior to 4.14 are a bit of an unknown for Ghostty. It
// is an environment that isn't tested well and we don't have a
// good understanding of what we may need to do.
_ = internal_os.setenv("GDK_DEBUG", "vulkan-disable");
}
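
The version gate above only changes whether the OpenGL/GLES knobs are added to GDK_DEBUG on GTK 4.14 and newer. A small sketch of that branch as a pure function; gdkDebug is a hypothetical helper written for illustration, not the code in this diff.

    const std = @import("std");

    /// Pick the GDK_DEBUG value based on the runtime GTK version,
    /// mirroring the two branches above.
    fn gdkDebug(version: std.SemanticVersion) []const u8 {
        const gtk_4_14: std.SemanticVersion = .{ .major = 4, .minor = 14, .patch = 0 };
        return if (version.order(gtk_4_14) != .lt)
            "opengl,gl-disable-gles,vulkan-disable"
        else
            "vulkan-disable";
    }

    test "GDK_DEBUG value per GTK version" {
        try std.testing.expectEqualStrings(
            "opengl,gl-disable-gles,vulkan-disable",
            gdkDebug(.{ .major = 4, .minor = 16, .patch = 1 }),
        );
        try std.testing.expectEqualStrings(
            "vulkan-disable",
            gdkDebug(.{ .major = 4, .minor = 12, .patch = 5 }),
        );
    }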
@@ -13,39 +13,39 @@ const icons = [_]struct {
}{
.{
.alias = "16x16",
.source = "16x16",
.source = "16",
},
.{
.alias = "16x16@2",
.source = "16x16@2x@2x",
.source = "16@2x",
},
.{
.alias = "32x32",
.source = "32x32",
.source = "32",
},
.{
.alias = "32x32@2",
.source = "32x32@2x@2x",
.source = "32@2x",
},
.{
.alias = "128x128",
.source = "128x128",
.source = "128",
},
.{
.alias = "128x128@2",
.source = "128x128@2x@2x",
.source = "128@2x",
},
.{
.alias = "256x256",
.source = "256x256",
.source = "256",
},
.{
.alias = "256x256@2",
.source = "256x256@2x@2x",
.source = "256@2x",
},
.{
.alias = "512x512",
.source = "512x512",
.source = "512",
},
};
@@ -42,7 +42,7 @@ pub fn create(b: *std.Build, opts: Options) *MetallibStep {
b,
b.fmt("metal {s}", .{opts.name}),
);
run_ir.addArgs(&.{ "xcrun", "-sdk", sdk, "metal", "-o" });
run_ir.addArgs(&.{ "/usr/bin/xcrun", "-sdk", sdk, "metal", "-o" });
const output_ir = run_ir.addOutputFileArg(b.fmt("{s}.ir", .{opts.name}));
run_ir.addArgs(&.{"-c"});
for (opts.sources) |source| run_ir.addFileArg(source);
@@ -62,7 +62,7 @@ pub fn create(b: *std.Build, opts: Options) *MetallibStep {
b,
b.fmt("metallib {s}", .{opts.name}),
);
run_lib.addArgs(&.{ "xcrun", "-sdk", sdk, "metallib", "-o" });
run_lib.addArgs(&.{ "/usr/bin/xcrun", "-sdk", sdk, "metallib", "-o" });
const output_lib = run_lib.addOutputFileArg(b.fmt("{s}.metallib", .{opts.name}));
run_lib.addFileArg(output_ir);
run_lib.step.dependOn(&run_ir.step);
@@ -668,9 +668,6 @@ link: RepeatableLink = .{},
/// does not apply to tabs, splits, etc. However, this setting will apply to all
/// new windows, not just the first one.
///
/// On macOS, this always creates the window in native fullscreen. Non-native
/// fullscreen is not currently supported with this setting.
///
/// On macOS, this setting does not work if window-decoration is set to
/// "false", because native fullscreen on macOS requires window decorations
/// to be set.
@@ -679,6 +676,12 @@ fullscreen: bool = false,
/// The title Ghostty will use for the window. This will force the title of the
/// window to be this title at all times and Ghostty will ignore any set title
/// escape sequences programs (such as Neovim) may send.
///
/// This configuration can be reloaded at runtime. If it is set, the title
/// will update for all windows. If it is unset, the next title change escape
/// sequence will be honored but previous changes will not retroactively
/// be set. This latter case may require you to restart programs such as Neovim
/// to get the new title.
title: ?[:0]const u8 = null,

/// The setting that will change the application class value.
@@ -2479,7 +2482,7 @@ pub fn loadCliArgs(self: *Config, alloc_gpa: Allocator) !void {
// First, we add an artificial "-e" so that if we
// replay the inputs to rebuild the config (i.e. if
// a theme is set) then we will get the same behavior.
try self._replay_steps.append(arena_alloc, .{ .arg = "-e" });
try self._replay_steps.append(arena_alloc, .@"-e");

// Next, take all remaining args and use that to build up
// a command to execute.
@@ -2587,6 +2590,24 @@ pub fn loadRecursiveFiles(self: *Config, alloc_gpa: Allocator) !void {

const cwd = std.fs.cwd();

// We need to insert all of our loaded config-file values
// PRIOR to the "-e" in our replay steps, since everything
// after "-e" becomes an "initial-command". To do this, we
// dupe the values if we find it.
var replay_suffix = std.ArrayList(Replay.Step).init(alloc_gpa);
defer replay_suffix.deinit();
for (self._replay_steps.items, 0..) |step, i| if (step == .@"-e") {
// We don't need to clone the steps because they should
// all be allocated in our arena and we're keeping our
// arena.
try replay_suffix.appendSlice(self._replay_steps.items[i..]);

// Remove our old values. Again, don't need to free any
// memory here because it's all part of our arena.
self._replay_steps.shrinkRetainingCapacity(i);
break;
};

// We must use a while below and not a for(items) because we
// may add items to the list while iterating for recursive
// config-file entries.
@@ -2638,6 +2659,14 @@ pub fn loadRecursiveFiles(self: *Config, alloc_gpa: Allocator) !void {
try self.loadIter(alloc_gpa, &iter);
try self.expandPaths(std.fs.path.dirname(path).?);
}

// If we have a suffix, add that back.
if (replay_suffix.items.len > 0) {
try self._replay_steps.appendSlice(
arena_alloc,
replay_suffix.items,
);
}
}

/// Change the state of conditionals and reload the configuration
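
The split-and-reappend that loadRecursiveFiles performs around the "-e" marker can be shown with plain strings: everything from the marker onward is set aside, newly loaded config-file values are appended, and the marker plus its command are restored at the end. A minimal sketch under those assumptions; the argument values are hypothetical, not Ghostty's Replay.Step type.

    const std = @import("std");

    test "insert config-file values before the -e marker" {
        const alloc = std.testing.allocator;
        var steps = std.ArrayList([]const u8).init(alloc);
        defer steps.deinit();
        try steps.appendSlice(&.{ "--font-size=12", "-e", "nvim" });

        // Split off everything from "-e" onward into a suffix.
        var suffix = std.ArrayList([]const u8).init(alloc);
        defer suffix.deinit();
        for (steps.items, 0..) |step, i| if (std.mem.eql(u8, step, "-e")) {
            try suffix.appendSlice(steps.items[i..]);
            steps.shrinkRetainingCapacity(i);
            break;
        };

        // Append a newly loaded value, then restore the suffix at the end.
        try steps.append("--theme=dark");
        try steps.appendSlice(suffix.items);

        try std.testing.expectEqualStrings("--theme=dark", steps.items[1]);
        try std.testing.expectEqualStrings("-e", steps.items[2]);
        try std.testing.expectEqualStrings("nvim", steps.items[3]);
    }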
@@ -2765,39 +2794,46 @@ fn loadTheme(self: *Config, theme: Theme) !void {
try new_config.loadIter(alloc_gpa, &iter);

// Setup our replay to be conditional.
for (new_config._replay_steps.items) |*item| switch (item.*) {
.expand => {},
conditional: for (new_config._replay_steps.items) |*item| {
switch (item.*) {
.expand => {},

// Change our arg to be conditional on our theme.
.arg => |v| {
const alloc_arena = new_config._arena.?.allocator();
const conds = try alloc_arena.alloc(Conditional, 1);
conds[0] = .{
.key = .theme,
.op = .eq,
.value = @tagName(self._conditional_state.theme),
};
item.* = .{ .conditional_arg = .{
.conditions = conds,
.arg = v,
} };
},
// If we see "-e" then we do NOT make the following arguments
// conditional since they are supposed to be part of the
// initial command.
.@"-e" => break :conditional,

.conditional_arg => |v| {
const alloc_arena = new_config._arena.?.allocator();
const conds = try alloc_arena.alloc(Conditional, v.conditions.len + 1);
conds[0] = .{
.key = .theme,
.op = .eq,
.value = @tagName(self._conditional_state.theme),
};
@memcpy(conds[1..], v.conditions);
item.* = .{ .conditional_arg = .{
.conditions = conds,
.arg = v.arg,
} };
},
};
// Change our arg to be conditional on our theme.
.arg => |v| {
const alloc_arena = new_config._arena.?.allocator();
const conds = try alloc_arena.alloc(Conditional, 1);
conds[0] = .{
.key = .theme,
.op = .eq,
.value = @tagName(self._conditional_state.theme),
};
item.* = .{ .conditional_arg = .{
.conditions = conds,
.arg = v,
} };
},

.conditional_arg => |v| {
const alloc_arena = new_config._arena.?.allocator();
const conds = try alloc_arena.alloc(Conditional, v.conditions.len + 1);
conds[0] = .{
.key = .theme,
.op = .eq,
.value = @tagName(self._conditional_state.theme),
};
@memcpy(conds[1..], v.conditions);
item.* = .{ .conditional_arg = .{
.conditions = conds,
.arg = v.arg,
} };
},
}
}

// Replay our previous inputs so that we can override values
// from the theme.
@@ -2989,10 +3025,12 @@ pub fn parseManuallyHook(
arg: []const u8,
iter: anytype,
) !bool {
// Keep track of our input args no matter what.
try self._replay_steps.append(alloc, .{ .arg = try alloc.dupe(u8, arg) });

if (std.mem.eql(u8, arg, "-e")) {
// Add the special -e marker. This prevents:
// (1) config-file from adding args to the end (see #2908)
// (2) dark/light theme from making this conditional
try self._replay_steps.append(alloc, .@"-e");

// Build up the command. We don't clean this up because we take
// ownership in our allocator.
var command = std.ArrayList(u8).init(alloc);
@@ -3028,6 +3066,12 @@ pub fn parseManuallyHook(
return false;
}

// Keep track of our input args for replay
try self._replay_steps.append(
alloc,
.{ .arg = try alloc.dupe(u8, arg) },
);

// If we didn't find a special case, continue parsing normally
return true;
}
@@ -3295,11 +3339,22 @@ const Replay = struct {
arg: []const u8,
},

/// The start of a "-e" argument. This marks the end of
/// traditional configuration and the beginning of the
/// "-e" initial command magic. This is separate from "arg"
/// because there are some behaviors unique to this (i.e.
/// we want to keep this at the end for config-file).
///
/// Note: when "-e" is used, ONLY this is present and
/// not an additional "arg" with "-e" value.
@"-e",

fn clone(
self: Step,
alloc: Allocator,
) Allocator.Error!Step {
return switch (self) {
.@"-e" => self,
.arg => |v| .{ .arg = try alloc.dupe(u8, v) },
.expand => |v| .{ .expand = try alloc.dupe(u8, v) },
.conditional_arg => |v| conditional: {
@@ -3335,10 +3390,6 @@ const Replay = struct {
log.warn("error expanding paths err={}", .{err});
},

.arg => |arg| {
return arg;
},

.conditional_arg => |v| conditional: {
// All conditions must match.
for (v.conditions) |cond| {
@@ -3349,6 +3400,9 @@ const Replay = struct {

return v.arg;
},

.arg => |arg| return arg,
.@"-e" => return "-e",
}
}
}
@@ -45,10 +45,26 @@ pub fn CircBuf(comptime T: type, comptime default: T) type {
self.idx += 1;
return &self.buf.storage[storage_idx];
}

/// Seek the iterator by a given amount. This will clamp
/// the values to the bounds of the buffer so overflows are
/// not possible.
pub fn seekBy(self: *Iterator, amount: isize) void {
if (amount > 0) {
self.idx +|= @intCast(amount);
} else {
self.idx -|= @intCast(@abs(amount));
}
}

/// Reset the iterator back to the first value.
pub fn reset(self: *Iterator) void {
self.idx = 0;
}
};
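
seekBy leans on Zig's saturating operators: `+|=` and `-|=` clamp at the integer bounds instead of overflowing, so an out-of-range seek pins to 0 or maxInt and is then bounded by the buffer length on the next access. A tiny self-contained check of that behavior, added here for illustration only.

    const std = @import("std");

    test "saturating index arithmetic" {
        var idx: usize = 2;
        idx -|= 10; // would underflow with -=; saturates to 0 instead
        try std.testing.expectEqual(@as(usize, 0), idx);

        idx = std.math.maxInt(usize) - 1;
        idx +|= 10; // saturates at maxInt(usize) instead of wrapping
        try std.testing.expectEqual(std.math.maxInt(usize), idx);
    }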
/// Initialize a new circular buffer that can store size elements.
pub fn init(alloc: Allocator, size: usize) !Self {
pub fn init(alloc: Allocator, size: usize) Allocator.Error!Self {
const buf = try alloc.alloc(T, size);
@memset(buf, default);

@@ -56,7 +72,7 @@ pub fn CircBuf(comptime T: type, comptime default: T) type {
.storage = buf,
.head = 0,
.tail = 0,
.full = false,
.full = size == 0,
};
}

@@ -67,7 +83,7 @@ pub fn CircBuf(comptime T: type, comptime default: T) type {

/// Append a single value to the buffer. If the buffer is full,
/// an error will be returned.
pub fn append(self: *Self, v: T) !void {
pub fn append(self: *Self, v: T) Allocator.Error!void {
if (self.full) return error.OutOfMemory;
self.storage[self.head] = v;
self.head += 1;
@@ -75,6 +91,19 @@ pub fn CircBuf(comptime T: type, comptime default: T) type {
self.full = self.head == self.tail;
}

/// Append a slice to the buffer. If the buffer cannot fit the
/// entire slice then an error will be returned. It is up to the
/// caller to rotate the circular buffer if they want to overwrite
/// the oldest data.
pub fn appendSlice(
self: *Self,
slice: []const T,
) Allocator.Error!void {
const storage = self.getPtrSlice(self.len(), slice.len);
fastmem.copy(T, storage[0], slice[0..storage[0].len]);
fastmem.copy(T, storage[1], slice[storage[0].len..]);
}
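
appendSlice works because getPtrSlice hands back up to two regions when the write wraps past the end of storage, and the copy splits accordingly. A generic sketch of that two-region write on a plain byte ring; ringWrite is a simplified stand-in, not the CircBuf API, and it assumes data.len <= storage.len.

    const std = @import("std");

    /// Write `data` into a ring starting at `head`, wrapping at the end.
    /// Returns the new head position.
    fn ringWrite(storage: []u8, head: usize, data: []const u8) usize {
        const first = @min(data.len, storage.len - head);
        @memcpy(storage[head..][0..first], data[0..first]); // region 1: up to the end
        @memcpy(storage[0 .. data.len - first], data[first..]); // region 2: wrapped remainder
        return (head + data.len) % storage.len;
    }

    test "wrapped write splits into two copies" {
        var storage = [_]u8{ '.', '.', '.', '.' };
        const head = ringWrite(&storage, 3, "XY"); // wraps: 'X' at [3], 'Y' at [0]
        try std.testing.expectEqual(@as(usize, 1), head);
        try std.testing.expectEqualSlices(u8, "Y..X", &storage);
    }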
/// Clear the buffer.
pub fn clear(self: *Self) void {
self.head = 0;
@@ -91,6 +120,34 @@ pub fn CircBuf(comptime T: type, comptime default: T) type {
};
}

/// Get the first (oldest) value in the buffer.
pub fn first(self: Self) ?*T {
// Note: this can be more efficient by not using the
// iterator, but this was an easy way to implement it.
var it = self.iterator(.forward);
return it.next();
}

/// Get the last (newest) value in the buffer.
pub fn last(self: Self) ?*T {
// Note: this can be more efficient by not using the
// iterator, but this was an easy way to implement it.
var it = self.iterator(.reverse);
return it.next();
}

/// Ensures that there is enough capacity to store amount more
/// items via append.
pub fn ensureUnusedCapacity(
self: *Self,
alloc: Allocator,
amount: usize,
) Allocator.Error!void {
const new_cap = self.len() + amount;
if (new_cap <= self.capacity()) return;
try self.resize(alloc, new_cap);
}

/// Resize the buffer to the given size (larger or smaller).
/// If larger, new values will be set to the default value.
pub fn resize(self: *Self, alloc: Allocator, size: usize) Allocator.Error!void {
@@ -256,7 +313,7 @@
try testing.expectEqual(@as(usize, 0), buf.len());
}

test "append" {
test "CircBuf append" {
const testing = std.testing;
const alloc = testing.allocator;

@@ -273,7 +330,7 @@ test "append" {
try testing.expectError(error.OutOfMemory, buf.append(5));
}

test "forward iterator" {
test "CircBuf forward iterator" {
const testing = std.testing;
const alloc = testing.allocator;

@@ -319,7 +376,7 @@ test "forward iterator" {
}
}

test "reverse iterator" {
test "CircBuf reverse iterator" {
const testing = std.testing;
const alloc = testing.allocator;

@@ -365,7 +422,95 @@ test "reverse iterator" {
}
}

test "getPtrSlice fits" {
test "CircBuf first/last" {
const testing = std.testing;
const alloc = testing.allocator;

const Buf = CircBuf(u8, 0);
var buf = try Buf.init(alloc, 3);
defer buf.deinit(alloc);

try buf.append(1);
try buf.append(2);
try buf.append(3);
try testing.expectEqual(3, buf.last().?.*);
try testing.expectEqual(1, buf.first().?.*);
}

test "CircBuf first/last empty" {
const testing = std.testing;
const alloc = testing.allocator;

const Buf = CircBuf(u8, 0);
var buf = try Buf.init(alloc, 0);
defer buf.deinit(alloc);

try testing.expect(buf.first() == null);
try testing.expect(buf.last() == null);
}

test "CircBuf first/last empty with cap" {
const testing = std.testing;
const alloc = testing.allocator;

const Buf = CircBuf(u8, 0);
var buf = try Buf.init(alloc, 3);
defer buf.deinit(alloc);

try testing.expect(buf.first() == null);
try testing.expect(buf.last() == null);
}

test "CircBuf append slice" {
const testing = std.testing;
const alloc = testing.allocator;

const Buf = CircBuf(u8, 0);
var buf = try Buf.init(alloc, 5);
defer buf.deinit(alloc);

try buf.appendSlice("hello");
{
var it = buf.iterator(.forward);
try testing.expect(it.next().?.* == 'h');
try testing.expect(it.next().?.* == 'e');
try testing.expect(it.next().?.* == 'l');
try testing.expect(it.next().?.* == 'l');
try testing.expect(it.next().?.* == 'o');
try testing.expect(it.next() == null);
}
}

test "CircBuf append slice with wrap" {
const testing = std.testing;
const alloc = testing.allocator;

const Buf = CircBuf(u8, 0);
var buf = try Buf.init(alloc, 4);
defer buf.deinit(alloc);

// Fill the buffer
_ = buf.getPtrSlice(0, buf.capacity());
try testing.expect(buf.full);
try testing.expectEqual(@as(usize, 4), buf.len());

// Delete
buf.deleteOldest(2);
try testing.expect(!buf.full);
try testing.expectEqual(@as(usize, 2), buf.len());

try buf.appendSlice("AB");
{
var it = buf.iterator(.forward);
try testing.expect(it.next().?.* == 0);
try testing.expect(it.next().?.* == 0);
try testing.expect(it.next().?.* == 'A');
try testing.expect(it.next().?.* == 'B');
try testing.expect(it.next() == null);
}
}

test "CircBuf getPtrSlice fits" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -379,7 +524,7 @@ test "getPtrSlice fits" {
|
||||
try testing.expectEqual(@as(usize, 11), buf.len());
|
||||
}
|
||||
|
||||
test "getPtrSlice wraps" {
|
||||
test "CircBuf getPtrSlice wraps" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -435,7 +580,7 @@ test "getPtrSlice wraps" {
|
||||
}
|
||||
}
|
||||
|
||||
test "rotateToZero" {
|
||||
test "CircBuf rotateToZero" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -447,7 +592,7 @@ test "rotateToZero" {
|
||||
try buf.rotateToZero(alloc);
|
||||
}
|
||||
|
||||
test "rotateToZero offset" {
|
||||
test "CircBuf rotateToZero offset" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -471,7 +616,7 @@ test "rotateToZero offset" {
|
||||
try testing.expectEqual(@as(usize, 1), buf.head);
|
||||
}
|
||||
|
||||
test "rotateToZero wraps" {
|
||||
test "CircBuf rotateToZero wraps" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -511,7 +656,7 @@ test "rotateToZero wraps" {
|
||||
}
|
||||
}
|
||||
|
||||
test "rotateToZero full no wrap" {
|
||||
test "CircBuf rotateToZero full no wrap" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -549,7 +694,32 @@ test "rotateToZero full no wrap" {
|
||||
}
|
||||
}
|
||||
|
||||
test "resize grow" {
|
||||
test "CircBuf resize grow from zero" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
const Buf = CircBuf(u8, 0);
|
||||
var buf = try Buf.init(alloc, 0);
|
||||
defer buf.deinit(alloc);
|
||||
try testing.expect(buf.full);
|
||||
|
||||
// Resize
|
||||
try buf.resize(alloc, 2);
|
||||
try testing.expect(!buf.full);
|
||||
try testing.expectEqual(@as(usize, 0), buf.len());
|
||||
try testing.expectEqual(@as(usize, 2), buf.capacity());
|
||||
|
||||
try buf.append(1);
|
||||
try buf.append(2);
|
||||
|
||||
{
|
||||
const slices = buf.getPtrSlice(0, 2);
|
||||
try testing.expectEqual(@as(u8, 1), slices[0][0]);
|
||||
try testing.expectEqual(@as(u8, 2), slices[0][1]);
|
||||
}
|
||||
}
|
||||
|
||||
test "CircBuf resize grow" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
@ -582,7 +752,7 @@ test "resize grow" {
|
||||
}
|
||||
}
|
||||
|
||||
test "resize shrink" {
|
||||
test "CircBuf resize shrink" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
|
@@ -330,6 +330,96 @@ pub fn deinit(self: *PageList) void {
}
}

/// Reset the PageList back to an empty state. This is similar to
/// deinit and reinit but it importantly preserves the pointer
/// stability of tracked pins (they're moved to the top-left since
/// all contents are cleared).
///
/// This can't fail because we always retain at least enough allocated
/// memory to fit the active area.
pub fn reset(self: *PageList) void {
// We need enough pages/nodes to keep our active area. This should
// never fail since we by definition have allocated a page already
// that fits our size but I'm not confident to make that assertion.
const cap = std_capacity.adjust(
.{ .cols = self.cols },
) catch @panic("reset: std_capacity.adjust failed");
assert(cap.rows > 0); // adjust should never return 0 rows

// The number of pages we need is the number of rows in the active
// area divided by the row capacity of a page.
const page_count = std.math.divCeil(
usize,
self.rows,
cap.rows,
) catch unreachable;

// Before resetting our pools we need to free any pages that
// are non-standard size since those were allocated outside
// the pool.
{
const page_alloc = self.pool.pages.arena.child_allocator;
var it = self.pages.first;
while (it) |node| : (it = node.next) {
if (node.data.memory.len > std_size) {
page_alloc.free(node.data.memory);
}
}
}

// Reset our pools to free as much memory as possible while retaining
// the capacity for at least the minimum number of pages we need.
// The return value is whether memory was reclaimed or not, but in
// either case the pool is left in a valid state.
_ = self.pool.pages.reset(.{
.retain_with_limit = page_count * PagePool.item_size,
});
_ = self.pool.nodes.reset(.{
.retain_with_limit = page_count * NodePool.item_size,
});

// Our page pool relies on mmap to zero our page memory. Since we're
// retaining a certain amount of memory, it won't use mmap and won't
// be zeroed. This block zeroes out all the memory in the pool arena.
{
// Note: we only have to do this for the page pool because the
// nodes are always fully overwritten on each allocation.
const page_arena = &self.pool.pages.arena;
var it = page_arena.state.buffer_list.first;
while (it) |node| : (it = node.next) {
// The fully allocated buffer
const alloc_buf = @as([*]u8, @ptrCast(node))[0..node.data];

// The buffer minus our header
const BufNode = @TypeOf(page_arena.state.buffer_list).Node;
const data_buf = alloc_buf[@sizeOf(BufNode)..];
@memset(data_buf, 0);
}
}

// Initialize our pages. This should not be able to fail since
// we retained the capacity for the minimum number of pages we need.
self.pages, self.page_size = initPages(
&self.pool,
self.cols,
self.rows,
) catch @panic("initPages failed");

// Update all our tracked pins to point to our first page top-left
{
var it = self.tracked_pins.iterator();
while (it.next()) |entry| {
const p: *Pin = entry.key_ptr.*;
p.node = self.pages.first.?;
p.x = 0;
p.y = 0;
}
}

// Move our viewport back to the active area since everything is gone.
self.viewport = .active;
}
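
The page_count above is a ceiling division of the active rows by the per-page row capacity, which is exactly what std.math.divCeil provides. A small worked check, added for illustration; the 45-rows-per-page figure is an assumed example, not a real capacity.

    const std = @import("std");

    test "pages needed for the active area" {
        // ceil(100 / 45) == 3 pages; a small screen still needs one page.
        try std.testing.expectEqual(@as(usize, 3), try std.math.divCeil(usize, 100, 45));
        try std.testing.expectEqual(@as(usize, 1), try std.math.divCeil(usize, 24, 45));
    }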
pub const Clone = struct {
/// The top and bottom (inclusive) points of the region to clone.
/// The x coordinate is ignored; the full row is always cloned.
@@ -2356,7 +2446,11 @@ pub fn countTrackedPins(self: *const PageList) usize {
/// Checks if a pin is valid for this pagelist. This is a very slow and
/// expensive operation since we traverse the entire linked list in the
/// worst case. Only for runtime safety/debug.
fn pinIsValid(self: *const PageList, p: Pin) bool {
pub fn pinIsValid(self: *const PageList, p: Pin) bool {
// This is very slow so we want to ensure we only ever
// call this during slow runtime safety builds.
comptime assert(build_config.slow_runtime_safety);

var it = self.pages.first;
while (it) |node| : (it = node.next) {
if (node != p.node) continue;
@@ -2450,6 +2544,50 @@ pub fn getCell(self: *const PageList, pt: point.Point) ?Cell {
};
}
pub const EncodeUtf8Options = struct {
/// The start and end points of the dump, both inclusive. The x will
/// be ignored and the full row will always be dumped.
tl: Pin,
br: ?Pin = null,

/// If true, this will unwrap soft-wrapped lines. If false, this will
/// dump the screen as it is visually seen in a rendered window.
unwrap: bool = true,

/// See Page.EncodeUtf8Options.
cell_map: ?*Page.CellMap = null,
};

/// Encode the pagelist to utf8 to the given writer.
///
/// The writer should be buffered; this function does not attempt to
/// efficiently write and often writes one byte at a time.
///
/// Note: this is tested using Screen.dumpString. This is a function that
/// predates this and is a thin wrapper around it so the tests all live there.
pub fn encodeUtf8(
self: *const PageList,
writer: anytype,
opts: EncodeUtf8Options,
) anyerror!void {
// We don't currently use self at all. There is an argument that this
// function should live on Pin instead but there is some future we might
// need state on here so... letting it go.
_ = self;

var page_opts: Page.EncodeUtf8Options = .{
.unwrap = opts.unwrap,
.cell_map = opts.cell_map,
};
var iter = opts.tl.pageIterator(.right_down, opts.br);
while (iter.next()) |chunk| {
const page: *const Page = &chunk.node.data;
page_opts.start_y = chunk.start;
page_opts.end_y = chunk.end;
page_opts.preceding = try page.encodeUtf8(writer, page_opts);
}
}

/// Log a debug diagram of the page list to the provided writer.
///
/// EXAMPLE:
@@ -8191,3 +8329,66 @@ test "PageList resize reflow wrap moves kitty placeholder" {
}
try testing.expect(it.next() == null);
}

test "PageList reset" {
const testing = std.testing;
const alloc = testing.allocator;

var s = try init(alloc, 80, 24, null);
defer s.deinit();
s.reset();
try testing.expect(s.viewport == .active);
try testing.expect(s.pages.first != null);
try testing.expectEqual(@as(usize, s.rows), s.totalRows());

// Active area should be the top
try testing.expectEqual(Pin{
.node = s.pages.first.?,
.y = 0,
.x = 0,
}, s.getTopLeft(.active));
}

test "PageList reset across two pages" {
const testing = std.testing;
const alloc = testing.allocator;

// Find a cap that makes it so that rows don't fit on one page.
const rows = 100;
const cap = cap: {
var cap = try std_capacity.adjust(.{ .cols = 50 });
while (cap.rows >= rows) cap = try std_capacity.adjust(.{
.cols = cap.cols + 50,
});

break :cap cap;
};

// Init
var s = try init(alloc, cap.cols, rows, null);
defer s.deinit();
s.reset();
try testing.expect(s.viewport == .active);
try testing.expect(s.pages.first != null);
try testing.expectEqual(@as(usize, s.rows), s.totalRows());
}

test "PageList clears history" {
const testing = std.testing;
const alloc = testing.allocator;

var s = try init(alloc, 80, 24, null);
defer s.deinit();
try s.growRows(30);
s.reset();
try testing.expect(s.viewport == .active);
try testing.expect(s.pages.first != null);
try testing.expectEqual(@as(usize, s.rows), s.totalRows());

// Active area should be the top
try testing.expectEqual(Pin{
.node = s.pages.first.?,
.y = 0,
.x = 0,
}, s.getTopLeft(.active));
}
@@ -83,8 +83,8 @@ pub const Dirty = packed struct {
/// The cursor position and style.
pub const Cursor = struct {
// The x/y position within the viewport.
x: size.CellCountInt,
y: size.CellCountInt,
x: size.CellCountInt = 0,
y: size.CellCountInt = 0,

/// The visual style of the cursor. This defaults to block because
/// it has to default to something, but users of this struct are
@@ -249,6 +249,50 @@ pub fn assertIntegrity(self: *const Screen) void {
}
}

/// Reset the screen according to the logic of a DEC RIS sequence.
///
/// - Clears the screen and attempts to reclaim memory.
/// - Moves the cursor to the top-left.
/// - Clears any cursor state: style, hyperlink, etc.
/// - Resets the charset
/// - Clears the selection
/// - Deletes all Kitty graphics
/// - Resets Kitty Keyboard settings
/// - Disables protection mode
///
pub fn reset(self: *Screen) void {
// Reset our pages
self.pages.reset();

// The above reset preserves tracked pins so we can still use
// our cursor pin, which should be at the top-left already.
const cursor_pin: *PageList.Pin = self.cursor.page_pin;
assert(cursor_pin.node == self.pages.pages.first.?);
assert(cursor_pin.x == 0);
assert(cursor_pin.y == 0);
const cursor_rac = cursor_pin.rowAndCell();
self.cursor.deinit(self.alloc);
self.cursor = .{
.page_pin = cursor_pin,
.page_row = cursor_rac.row,
.page_cell = cursor_rac.cell,
};

// Clear kitty graphics
self.kitty_images.delete(
self.alloc,
undefined, // All image deletion doesn't need the terminal
.{ .all = true },
);

// Reset our basic state
self.saved_cursor = null;
self.charset = .{};
self.kitty_keyboard = .{};
self.protected_mode = .off;
self.clearSelection();
}
/// Clone the screen.
///
/// This will copy:
@@ -2687,95 +2731,15 @@ pub fn promptPath(
return .{ .x = to_x - from_x, .y = to_y - from_y };
}

pub const DumpString = struct {
/// The start and end points of the dump, both inclusive. The x will
/// be ignored and the full row will always be dumped.
tl: Pin,
br: ?Pin = null,

/// If true, this will unwrap soft-wrapped lines. If false, this will
/// dump the screen as it is visually seen in a rendered window.
unwrap: bool = true,
};

/// Dump the screen to a string. The writer given should be buffered;
/// this function does not attempt to efficiently write and generally writes
/// one byte at a time.
pub fn dumpString(
self: *const Screen,
writer: anytype,
opts: DumpString,
) !void {
var blank_rows: usize = 0;
var blank_cells: usize = 0;

var iter = opts.tl.rowIterator(.right_down, opts.br);
while (iter.next()) |row_offset| {
const rac = row_offset.rowAndCell();
const row = rac.row;
const cells = cells: {
const cells: [*]pagepkg.Cell = @ptrCast(rac.cell);
break :cells cells[0..self.pages.cols];
};

if (!pagepkg.Cell.hasTextAny(cells)) {
blank_rows += 1;
continue;
}
if (blank_rows > 0) {
for (0..blank_rows) |_| try writer.writeByte('\n');
blank_rows = 0;
}

if (!row.wrap or !opts.unwrap) {
// If we're not wrapped, we always add a newline.
// If we are wrapped, we only add a new line if we're unwrapping
// soft-wrapped lines.
blank_rows += 1;
}

if (!row.wrap_continuation or !opts.unwrap) {
// We should also reset blank cell counts at the start of each row
// unless we're unwrapping and this row is a wrap continuation.
blank_cells = 0;
}

for (cells) |*cell| {
// Skip spacers
switch (cell.wide) {
.narrow, .wide => {},
.spacer_head, .spacer_tail => continue,
}

// If we have a zero value, then we accumulate a counter. We
// only want to turn zero values into spaces if we have a non-zero
// char sometime later.
if (!cell.hasText()) {
blank_cells += 1;
continue;
}
if (blank_cells > 0) {
try writer.writeByteNTimes(' ', blank_cells);
blank_cells = 0;
}

switch (cell.content_tag) {
.codepoint => {
try writer.print("{u}", .{cell.content.codepoint});
},

.codepoint_grapheme => {
try writer.print("{u}", .{cell.content.codepoint});
const cps = row_offset.node.data.lookupGrapheme(cell).?;
for (cps) |cp| {
try writer.print("{u}", .{cp});
}
},

else => unreachable,
}
}
}
opts: PageList.EncodeUtf8Options,
) anyerror!void {
try self.pages.encodeUtf8(writer, opts);
}
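
The removed dumpString body, and the PageList/Page encoder that replaces it, relies on one trick throughout: blanks are only counted, and they are flushed as spaces or newlines the moment real text appears, so trailing blanks never reach the output. A stripped-down sketch of that idea for a single row of bytes; writeRowTrimmed is a hypothetical helper with zero meaning an empty cell.

    const std = @import("std");

    /// Write `cells`, turning runs of zero cells into spaces only when
    /// later text needs them; trailing blanks are dropped entirely.
    fn writeRowTrimmed(writer: anytype, cells: []const u8) !void {
        var blanks: usize = 0;
        for (cells) |c| {
            if (c == 0) {
                blanks += 1;
                continue;
            }
            try writer.writeByteNTimes(' ', blanks);
            blanks = 0;
            try writer.writeByte(c);
        }
    }

    test "trailing blanks are not emitted" {
        var buf: [16]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buf);
        try writeRowTrimmed(fbs.writer(), &.{ 'A', 0, 0, 'B', 0, 0 });
        try std.testing.expectEqualStrings("A  B", fbs.getWritten());
    }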
/// You should use dumpString, this is a restricted version mostly for
@@ -8504,3 +8468,81 @@ test "Screen: adjustCapacity cursor style ref count" {
);
}
}

test "Screen UTF8 cell map with newlines" {
const testing = std.testing;
const alloc = testing.allocator;

var s = try Screen.init(alloc, 80, 24, 0);
defer s.deinit();
try s.testWriteString("A\n\nB\n\nC");

var cell_map = Page.CellMap.init(alloc);
defer cell_map.deinit();
var builder = std.ArrayList(u8).init(alloc);
defer builder.deinit();
try s.dumpString(builder.writer(), .{
.tl = s.pages.getTopLeft(.screen),
.br = s.pages.getBottomRight(.screen),
.cell_map = &cell_map,
});

try testing.expectEqual(7, builder.items.len);
try testing.expectEqualStrings("A\n\nB\n\nC", builder.items);
try testing.expectEqual(builder.items.len, cell_map.items.len);
try testing.expectEqual(Page.CellMapEntry{
.x = 0,
.y = 0,
}, cell_map.items[0]);
try testing.expectEqual(Page.CellMapEntry{
.x = 1,
.y = 0,
}, cell_map.items[1]);
try testing.expectEqual(Page.CellMapEntry{
.x = 0,
.y = 1,
}, cell_map.items[2]);
try testing.expectEqual(Page.CellMapEntry{
.x = 0,
.y = 2,
}, cell_map.items[3]);
}

test "Screen UTF8 cell map with blank prefix" {
const testing = std.testing;
const alloc = testing.allocator;

var s = try Screen.init(alloc, 80, 24, 0);
defer s.deinit();
s.cursorAbsolute(2, 1);
try s.testWriteString("B");

var cell_map = Page.CellMap.init(alloc);
defer cell_map.deinit();
var builder = std.ArrayList(u8).init(alloc);
defer builder.deinit();
try s.dumpString(builder.writer(), .{
.tl = s.pages.getTopLeft(.screen),
.br = s.pages.getBottomRight(.screen),
.cell_map = &cell_map,
});

try testing.expectEqualStrings("\n  B", builder.items);
try testing.expectEqual(builder.items.len, cell_map.items.len);
try testing.expectEqual(Page.CellMapEntry{
.x = 0,
.y = 0,
}, cell_map.items[0]);
try testing.expectEqual(Page.CellMapEntry{
.x = 0,
.y = 1,
}, cell_map.items[1]);
try testing.expectEqual(Page.CellMapEntry{
.x = 1,
.y = 1,
}, cell_map.items[2]);
try testing.expectEqual(Page.CellMapEntry{
.x = 2,
.y = 1,
}, cell_map.items[3]);
}
@@ -2627,82 +2627,38 @@ pub fn plainStringUnwrapped(self: *Terminal, alloc: Allocator) ![]const u8 {

/// Full reset.
///
/// This will attempt to free the existing screen memory and allocate
/// new screens but if that fails this will reuse the existing memory
/// from the prior screens. In the latter case, memory may be wasted
/// (since it's unused) but it isn't leaked.
/// This will attempt to free the existing screen memory but if that fails
/// this will reuse the existing memory. In the latter case, memory may
/// be wasted (since it's unused) but it isn't leaked.
pub fn fullReset(self: *Terminal) void {
// Attempt to initialize new screens.
var new_primary = Screen.init(
self.screen.alloc,
self.cols,
self.rows,
self.screen.pages.explicit_max_size,
) catch |err| {
log.warn("failed to allocate new primary screen, reusing old memory err={}", .{err});
self.fallbackReset();
return;
};
const new_secondary = Screen.init(
self.secondary_screen.alloc,
self.cols,
self.rows,
0,
) catch |err| {
log.warn("failed to allocate new secondary screen, reusing old memory err={}", .{err});
new_primary.deinit();
self.fallbackReset();
return;
};
// Reset our screens
self.screen.reset();
self.secondary_screen.reset();

// If we got here, both new screens were successfully allocated
// and we can deinitialize the old screens.
self.screen.deinit();
self.secondary_screen.deinit();
// Ensure we're back on primary screen
if (self.active_screen != .primary) {
const old = self.screen;
self.screen = self.secondary_screen;
self.secondary_screen = old;
self.active_screen = .primary;
}

// Replace with the newly allocated screens.
self.screen = new_primary;
self.secondary_screen = new_secondary;

self.resetCommonState();
}

fn fallbackReset(self: *Terminal) void {
// Clear existing screens without reallocation
self.primaryScreen(.{ .clear_on_exit = true, .cursor_save = false });
self.screen.clearSelection();
self.eraseDisplay(.scrollback, false);
self.eraseDisplay(.complete, false);
self.screen.cursorAbsolute(0, 0);
self.resetCommonState();
}

fn resetCommonState(self: *Terminal) void {
// We set the saved cursor to null and then restore. This will force
// our cursor to go back to the default which will also move the cursor
// to the top-left.
self.screen.saved_cursor = null;
self.restoreCursor() catch |err| {
log.warn("restore cursor on primary screen failed err={}", .{err});
};

self.screen.endHyperlink();
self.screen.charset = .{};
// Reset our basic state
self.modes.reset();
self.flags = .{};
self.tabstops.reset(TABSTOP_INTERVAL);
self.screen.kitty_keyboard = .{};
self.secondary_screen.kitty_keyboard = .{};
self.screen.protected_mode = .off;
self.previous_char = null;
self.pwd.clearRetainingCapacity();
self.status_display = .main;
self.scrolling_region = .{
.top = 0,
.bottom = self.rows - 1,
.left = 0,
.right = self.cols - 1,
};
self.previous_char = null;
self.pwd.clearRetainingCapacity();
self.status_display = .main;

// Always mark dirty so we redraw everything
self.flags.dirty.clear = true;
}

/// Returns true if the point is dirty, used for testing.
@@ -10575,6 +10531,16 @@ test "Terminal: fullReset default modes" {
try testing.expect(t.modes.get(.grapheme_cluster));
}

test "Terminal: fullReset tracked pins" {
var t = try init(testing.allocator, .{ .cols = 80, .rows = 80 });
defer t.deinit(testing.allocator);

// Create a tracked pin
const p = try t.screen.pages.trackPin(t.screen.cursor.page_pin.*);
t.fullReset();
try testing.expect(t.screen.pages.pinIsValid(p.*));
}

// https://github.com/mitchellh/ghostty/issues/272
// This is also tested in depth in screen resize tests but I want to keep
// this test around to ensure we don't regress at multiple layers.
@ -690,7 +690,7 @@ pub const ImageStorage = struct {
|
||||
br.x = @min(
|
||||
// We need to sub one here because the x value is
|
||||
// one width already. So if the image is width "1"
|
||||
// then we add zero to X because X itelf is width 1.
|
||||
// then we add zero to X because X itself is width 1.
|
||||
pin.x + (grid_size.cols - 1),
|
||||
t.cols - 1,
|
||||
);
|
||||
|
@ -18,6 +18,7 @@ pub const kitty = @import("kitty.zig");
|
||||
pub const modes = @import("modes.zig");
|
||||
pub const page = @import("page.zig");
|
||||
pub const parse_table = @import("parse_table.zig");
|
||||
pub const search = @import("search.zig");
|
||||
pub const size = @import("size.zig");
|
||||
pub const tmux = @import("tmux.zig");
|
||||
pub const x11_color = @import("x11_color.zig");
|
||||
|
@ -1481,6 +1481,179 @@ pub const Page = struct {
|
||||
return self.grapheme_map.map(self.memory).capacity();
|
||||
}
|
||||
|
||||
/// Options for encoding the page as UTF-8.
|
||||
pub const EncodeUtf8Options = struct {
|
||||
/// The range of rows to encode. If end_y is null, then it will
|
||||
/// encode to the end of the page.
|
||||
start_y: size.CellCountInt = 0,
|
||||
end_y: ?size.CellCountInt = null,
|
||||
|
||||
/// If true, this will unwrap soft-wrapped lines. If false, this will
|
||||
/// dump the screen as it is visually seen in a rendered window.
|
||||
unwrap: bool = true,
|
||||
|
||||
/// Preceding state from encoding the prior page. Used to preserve
|
||||
/// blanks properly across multiple pages.
|
||||
preceding: TrailingUtf8State = .{},
|
||||
|
||||
/// If non-null, this will be cleared and filled with the x/y
|
||||
/// coordinates of each byte in the UTF-8 encoded output.
|
||||
/// The index in the array is the byte offset in the output
|
||||
/// where 0 is the cursor of the writer when the function is
|
||||
/// called.
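/// For example, encoding the two rows "A" and "B" produces the bytes
/// "A\nB" with entries (0,0), (1,0), (0,1): the newline byte is
/// attributed to the column just past the last printed cell of the
/// row it ends.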
|
||||
cell_map: ?*CellMap = null,
|
||||
|
||||
/// Trailing state for UTF-8 encoding.
|
||||
pub const TrailingUtf8State = struct {
|
||||
rows: usize = 0,
|
||||
cells: usize = 0,
|
||||
};
|
||||
};
|
||||
|
||||
/// See cell_map
|
||||
pub const CellMap = std.ArrayList(CellMapEntry);
|
||||
|
||||
/// The x/y coordinate of a single cell in the cell map.
|
||||
pub const CellMapEntry = struct {
|
||||
y: size.CellCountInt,
|
||||
x: size.CellCountInt,
|
||||
};
|
||||
|
||||
/// Encode the page contents as UTF-8.
|
||||
///
|
||||
/// If preceding is non-null, then it will be used to initialize our
|
||||
/// blank rows/cells count so that we can accumulate blanks across
|
||||
/// multiple pages.
|
||||
///
|
||||
/// Note: Many tests for this function are done via Screen.dumpString
|
||||
/// tests since that function is a thin wrapper around this one and
|
||||
/// it makes it easier to test input contents.
|
||||
pub fn encodeUtf8(
|
||||
self: *const Page,
|
||||
writer: anytype,
|
||||
opts: EncodeUtf8Options,
|
||||
) anyerror!EncodeUtf8Options.TrailingUtf8State {
|
||||
var blank_rows: usize = opts.preceding.rows;
|
||||
var blank_cells: usize = opts.preceding.cells;
|
||||
|
||||
const start_y: size.CellCountInt = opts.start_y;
|
||||
const end_y: size.CellCountInt = opts.end_y orelse self.size.rows;
|
||||
|
||||
// We can probably avoid this by doing the logic below in a different
|
||||
// way. The reason this exists is so that when we end a non-blank
|
||||
// line with a newline, we can correctly map the cell map over to
|
||||
// the correct x value.
|
||||
//
|
||||
// For example "A\nB". The cell map for "\n" should be (1, 0).
|
||||
// This is tested in Screen.zig so feel free to refactor this.
|
||||
var last_x: size.CellCountInt = 0;
|
||||
|
||||
for (start_y..end_y) |y_usize| {
|
||||
const y: size.CellCountInt = @intCast(y_usize);
|
||||
const row: *Row = self.getRow(y);
|
||||
const cells: []const Cell = self.getCells(row);
|
||||
|
||||
// If this row is blank, accumulate to avoid a bunch of extra
|
||||
// work later. If it isn't blank, make sure we dump all our
|
||||
// blanks.
|
||||
if (!Cell.hasTextAny(cells)) {
|
||||
blank_rows += 1;
|
||||
continue;
|
||||
}
|
||||
for (1..blank_rows + 1) |i| {
|
||||
try writer.writeByte('\n');
|
||||
|
||||
// This is tested in Screen.zig, i.e. one test is
|
||||
// "cell map with newlines"
|
||||
if (opts.cell_map) |cell_map| {
|
||||
try cell_map.append(.{
|
||||
.x = last_x,
|
||||
.y = @intCast(y - blank_rows + i - 1),
|
||||
});
|
||||
last_x = 0;
|
||||
}
|
||||
}
|
||||
blank_rows = 0;
|
||||
|
||||
// If we're not wrapped, we always add a newline so after
|
||||
// the row is printed we can add a newline.
|
||||
if (!row.wrap or !opts.unwrap) blank_rows += 1;
|
||||
|
||||
// If the row doesn't continue a wrap then we need to reset
|
||||
// our blank cell count.
|
||||
if (!row.wrap_continuation or !opts.unwrap) blank_cells = 0;
|
||||
|
||||
// Go through each cell and print it
|
||||
for (cells, 0..) |*cell, x_usize| {
|
||||
const x: size.CellCountInt = @intCast(x_usize);
|
||||
|
||||
// Skip spacers
|
||||
switch (cell.wide) {
|
||||
.narrow, .wide => {},
|
||||
.spacer_head, .spacer_tail => continue,
|
||||
}
|
||||
|
||||
// If we have a zero value, then we accumulate a counter. We
|
||||
// only want to turn zero values into spaces if we have a non-zero
|
||||
// char sometime later.
|
||||
if (!cell.hasText()) {
|
||||
blank_cells += 1;
|
||||
continue;
|
||||
}
|
||||
if (blank_cells > 0) {
|
||||
try writer.writeByteNTimes(' ', blank_cells);
|
||||
if (opts.cell_map) |cell_map| {
|
||||
for (0..blank_cells) |i| try cell_map.append(.{
|
||||
.x = @intCast(x - blank_cells + i),
|
||||
.y = y,
|
||||
});
|
||||
}
|
||||
|
||||
blank_cells = 0;
|
||||
}
|
||||
|
||||
switch (cell.content_tag) {
|
||||
.codepoint => {
|
||||
try writer.print("{u}", .{cell.content.codepoint});
|
||||
if (opts.cell_map) |cell_map| {
|
||||
last_x = x + 1;
|
||||
try cell_map.append(.{
|
||||
.x = x,
|
||||
.y = y,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
.codepoint_grapheme => {
|
||||
try writer.print("{u}", .{cell.content.codepoint});
|
||||
if (opts.cell_map) |cell_map| {
|
||||
last_x = x + 1;
|
||||
try cell_map.append(.{
|
||||
.x = x,
|
||||
.y = y,
|
||||
});
|
||||
}
|
||||
|
||||
for (self.lookupGrapheme(cell).?) |cp| {
|
||||
try writer.print("{u}", .{cp});
|
||||
if (opts.cell_map) |cell_map| try cell_map.append(.{
|
||||
.x = x,
|
||||
.y = y,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
// Unreachable since we do hasText() above
|
||||
.bg_color_palette,
|
||||
.bg_color_rgb,
|
||||
=> unreachable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return .{ .rows = blank_rows, .cells = blank_cells };
|
||||
}
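// Illustrative usage sketch (not part of this change; `alloc` and `page`
// are assumed to be provided by the caller):
//
//   var buf = std.ArrayList(u8).init(alloc);
//   defer buf.deinit();
//   var cell_map = CellMap.init(alloc);
//   defer cell_map.deinit();
//   const trailing = try page.encodeUtf8(buf.writer(), .{
//       .cell_map = &cell_map,
//   });
//   // buf.items[i] was produced by the cell at
//   // (cell_map.items[i].x, cell_map.items[i].y). `trailing` can be
//   // passed as `.preceding` when encoding the next page so blank rows
//   // and cells carry across page boundaries.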
|
||||
|
||||
/// Returns the bitset for the dirty bits on this page.
|
||||
///
|
||||
/// The returned value is a DynamicBitSetUnmanaged but it is NOT
|
||||
|
864
src/terminal/search.zig
Normal file
@ -0,0 +1,864 @@
|
||||
//! Search functionality for the terminal.
|
||||
//!
|
||||
//! At the time of writing this comment, this is a **work in progress**.
|
||||
//!
|
||||
//! Search at the time of writing is implemented using a simple
|
||||
//! Boyer-Moore-Horspool algorithm. The suboptimal part of the implementation
|
||||
//! is that we need to encode each terminal page into a text buffer in order
|
||||
//! to apply BMH to it. This is because the terminal page is not laid out
|
||||
//! in a flat text form.
|
||||
//!
|
||||
//! To minimize memory usage, we use a sliding window to search for the
|
||||
//! needle. The sliding window only keeps the minimum amount of page data
|
||||
//! in memory to search for a needle (i.e. `needle.len - 1` bytes of overlap
|
||||
//! between terminal pages).
|
||||
//!
|
||||
//! Future work:
|
||||
//!
|
||||
//! - Running a PageListSearch on a PageList concurrently with another thread
|
||||
//! - Handle pruned pages in a PageList to ensure we don't keep references
|
||||
//! - Repeat a search over a changing active area of the screen
|
||||
//! - Reverse search so that more recent matches are found first
|
||||
//!
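//! As a concrete example of the window invariant: with a 4-byte needle
//! such as "boo!", only the last 3 encoded bytes of an already-searched
//! page must stay resident, so a match that starts at the very end of
//! one page and finishes at the start of the next is still found.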
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const assert = std.debug.assert;
|
||||
const CircBuf = @import("../datastruct/main.zig").CircBuf;
|
||||
const terminal = @import("main.zig");
|
||||
const point = terminal.point;
|
||||
const Page = terminal.Page;
|
||||
const PageList = terminal.PageList;
|
||||
const Pin = PageList.Pin;
|
||||
const Selection = terminal.Selection;
|
||||
const Screen = terminal.Screen;
|
||||
|
||||
/// Searches for a term in a PageList structure.
|
||||
///
|
||||
/// At the time of writing, this does not support searching a pagelist
|
||||
/// while it's being used by another thread. This will be resolved
|
||||
/// in the future.
|
||||
pub const PageListSearch = struct {
|
||||
/// The list we're searching.
|
||||
list: *PageList,
|
||||
|
||||
/// The sliding window of page contents and nodes to search.
|
||||
window: SlidingWindow,
|
||||
|
||||
/// Initialize the page list search.
|
||||
///
|
||||
/// The needle is not copied and must be kept alive for the duration
|
||||
/// of the search operation.
|
||||
pub fn init(
|
||||
alloc: Allocator,
|
||||
list: *PageList,
|
||||
needle: []const u8,
|
||||
) Allocator.Error!PageListSearch {
|
||||
var window = try SlidingWindow.init(alloc, needle);
|
||||
errdefer window.deinit(alloc);
|
||||
|
||||
return .{
|
||||
.list = list,
|
||||
.window = window,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *PageListSearch, alloc: Allocator) void {
|
||||
self.window.deinit(alloc);
|
||||
}
|
||||
|
||||
/// Find the next match for the needle in the pagelist. This returns
|
||||
/// null when there are no more matches.
|
||||
pub fn next(
|
||||
self: *PageListSearch,
|
||||
alloc: Allocator,
|
||||
) Allocator.Error!?Selection {
|
||||
// Try to search for the needle in the window. If we find a match
|
||||
// then we can return that and we're done.
|
||||
if (self.window.next()) |sel| return sel;
|
||||
|
||||
// Get our next node. If we have a value in our window then we
|
||||
// can determine the next node. If we don't, we've never setup the
|
||||
// window so we use our first node.
|
||||
var node_: ?*PageList.List.Node = if (self.window.meta.last()) |meta|
|
||||
meta.node.next
|
||||
else
|
||||
self.list.pages.first;
|
||||
|
||||
// Add one pagelist node at a time, look for matches, and repeat
|
||||
// until we find a match or we reach the end of the pagelist.
|
||||
// This append-then-next pattern limits the memory usage of the window.
|
||||
while (node_) |node| : (node_ = node.next) {
|
||||
try self.window.append(alloc, node);
|
||||
if (self.window.next()) |sel| return sel;
|
||||
}
|
||||
|
||||
// We've reached the end of the pagelist, no matches.
|
||||
return null;
|
||||
}
|
||||
};
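
// Illustrative sketch only (not part of this change): one way a caller
// might drive a PageListSearch, collecting every match into a
// caller-owned slice. The helper name is hypothetical.
fn collectMatches(
    alloc: Allocator,
    list: *PageList,
    needle: []const u8,
) ![]Selection {
    var search = try PageListSearch.init(alloc, list, needle);
    defer search.deinit(alloc);

    var results = std.ArrayList(Selection).init(alloc);
    errdefer results.deinit();
    while (try search.next(alloc)) |sel| try results.append(sel);

    // The caller owns the returned slice and must free it with `alloc`.
    return try results.toOwnedSlice();
}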
|
||||
|
||||
/// Searches page nodes via a sliding window. The sliding window maintains
|
||||
/// the invariant that data isn't pruned until (1) we've searched it and
|
||||
/// (2) we've accounted for overlaps across pages to fit the needle.
|
||||
///
|
||||
/// The sliding window is first initialized empty. Pages are then appended
|
||||
/// in the order to search them. If you're doing a reverse search then the
|
||||
/// pages should be appended in reverse order and the needle should be
|
||||
/// reversed.
|
||||
///
|
||||
/// All appends grow the window. The window is only pruned when a search
|
||||
/// is done (positive or negative match) via `next()`.
|
||||
///
|
||||
/// To avoid unnecessary memory growth, the recommended usage is to
|
||||
/// call `next()` until it returns null and then `append` the next page
|
||||
/// and repeat the process. This will always maintain the minimum
|
||||
/// required memory to search for the needle.
|
||||
const SlidingWindow = struct {
|
||||
/// The data buffer is a circular buffer of u8 that contains the
|
||||
/// encoded page text that we can use to search for the needle.
|
||||
data: DataBuf,
|
||||
|
||||
/// The meta buffer is a circular buffer that contains the metadata
|
||||
/// about the pages we're searching. This usually isn't that large
|
||||
/// so callers must iterate through it to find the offset to map
|
||||
/// data to meta.
|
||||
meta: MetaBuf,
|
||||
|
||||
/// Offset into data for our current state. This handles the
|
||||
/// situation where our search moved through meta[0] but didn't
|
||||
/// do enough to prune it.
|
||||
data_offset: usize = 0,
|
||||
|
||||
/// The needle we're searching for. Does not own the memory.
|
||||
needle: []const u8,
|
||||
|
||||
/// A buffer to store the overlap search data. This is used to search
|
||||
/// overlaps between pages where the match starts on one page and
|
||||
/// ends on another. The length is always `needle.len * 2`.
|
||||
overlap_buf: []u8,
|
||||
|
||||
const DataBuf = CircBuf(u8, 0);
|
||||
const MetaBuf = CircBuf(Meta, undefined);
|
||||
const Meta = struct {
|
||||
node: *PageList.List.Node,
|
||||
cell_map: Page.CellMap,
|
||||
|
||||
pub fn deinit(self: *Meta) void {
|
||||
self.cell_map.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
pub fn init(
|
||||
alloc: Allocator,
|
||||
needle: []const u8,
|
||||
) Allocator.Error!SlidingWindow {
|
||||
var data = try DataBuf.init(alloc, 0);
|
||||
errdefer data.deinit(alloc);
|
||||
|
||||
var meta = try MetaBuf.init(alloc, 0);
|
||||
errdefer meta.deinit(alloc);
|
||||
|
||||
const overlap_buf = try alloc.alloc(u8, needle.len * 2);
|
||||
errdefer alloc.free(overlap_buf);
|
||||
|
||||
return .{
|
||||
.data = data,
|
||||
.meta = meta,
|
||||
.needle = needle,
|
||||
.overlap_buf = overlap_buf,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: *SlidingWindow, alloc: Allocator) void {
|
||||
alloc.free(self.overlap_buf);
|
||||
self.data.deinit(alloc);
|
||||
|
||||
var meta_it = self.meta.iterator(.forward);
|
||||
while (meta_it.next()) |meta| meta.deinit();
|
||||
self.meta.deinit(alloc);
|
||||
}
|
||||
|
||||
/// Clear all data but retain allocated capacity.
|
||||
pub fn clearAndRetainCapacity(self: *SlidingWindow) void {
|
||||
var meta_it = self.meta.iterator(.forward);
|
||||
while (meta_it.next()) |meta| meta.deinit();
|
||||
self.meta.clear();
|
||||
self.data.clear();
|
||||
self.data_offset = 0;
|
||||
}
|
||||
|
||||
/// Search the window for the next occurrence of the needle. As
|
||||
/// the window moves, the window will prune itself while maintaining
|
||||
/// the invariant that the window is always big enough to contain
|
||||
/// the needle.
|
||||
pub fn next(self: *SlidingWindow) ?Selection {
|
||||
const slices = slices: {
|
||||
// If we have less data than the needle then we can't possibly match
|
||||
const data_len = self.data.len();
|
||||
if (data_len < self.needle.len) return null;
|
||||
|
||||
break :slices self.data.getPtrSlice(
|
||||
self.data_offset,
|
||||
data_len - self.data_offset,
|
||||
);
|
||||
};
|
||||
|
||||
// Search the first slice for the needle.
|
||||
if (std.mem.indexOf(u8, slices[0], self.needle)) |idx| {
|
||||
return self.selection(idx, self.needle.len);
|
||||
}
|
||||
|
||||
// Search the overlap buffer for the needle.
|
||||
if (slices[0].len > 0 and slices[1].len > 0) overlap: {
|
||||
// Get up to needle.len - 1 bytes from each side (as much as
|
||||
// we can) and store it in the overlap buffer.
|
||||
const prefix: []const u8 = prefix: {
|
||||
const len = @min(slices[0].len, self.needle.len - 1);
|
||||
const idx = slices[0].len - len;
|
||||
break :prefix slices[0][idx..];
|
||||
};
|
||||
const suffix: []const u8 = suffix: {
|
||||
const len = @min(slices[1].len, self.needle.len - 1);
|
||||
break :suffix slices[1][0..len];
|
||||
};
|
||||
const overlap_len = prefix.len + suffix.len;
|
||||
assert(overlap_len <= self.overlap_buf.len);
|
||||
@memcpy(self.overlap_buf[0..prefix.len], prefix);
|
||||
@memcpy(self.overlap_buf[prefix.len..overlap_len], suffix);
|
||||
|
||||
// Search the overlap
|
||||
const idx = std.mem.indexOf(
|
||||
u8,
|
||||
self.overlap_buf[0..overlap_len],
|
||||
self.needle,
|
||||
) orelse break :overlap;
|
||||
|
||||
// We found a match in the overlap buffer. We need to map the
|
||||
// index back to the data buffer in order to get our selection.
|
||||
return self.selection(
|
||||
slices[0].len - prefix.len + idx,
|
||||
self.needle.len,
|
||||
);
|
||||
}
|
||||
|
||||
// Search the last slice for the needle.
|
||||
if (std.mem.indexOf(u8, slices[1], self.needle)) |idx| {
|
||||
return self.selection(slices[0].len + idx, self.needle.len);
|
||||
}
|
||||
|
||||
// No match. We keep `needle.len - 1` bytes available to
|
||||
// handle the future overlap case.
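// For example, with a 4-byte needle only the last 3 bytes of encoded
// data must be retained: any meta whose cell map lies entirely before
// that tail is deleted below along with its data.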
|
||||
var meta_it = self.meta.iterator(.reverse);
|
||||
prune: {
|
||||
var saved: usize = 0;
|
||||
while (meta_it.next()) |meta| {
|
||||
const needed = self.needle.len - 1 - saved;
|
||||
if (meta.cell_map.items.len >= needed) {
|
||||
// We save up to this meta. We set our data offset
|
||||
// to exactly where it needs to be to continue
|
||||
// searching.
|
||||
self.data_offset = meta.cell_map.items.len - needed;
|
||||
break;
|
||||
}
|
||||
|
||||
saved += meta.cell_map.items.len;
|
||||
} else {
|
||||
// If we exited the while loop naturally then we
|
||||
// never got the amount we needed and so there is
|
||||
// nothing to prune.
|
||||
assert(saved < self.needle.len - 1);
|
||||
break :prune;
|
||||
}
|
||||
|
||||
const prune_count = self.meta.len() - meta_it.idx;
|
||||
if (prune_count == 0) {
|
||||
// This can happen if we need to save up to the first
|
||||
// meta value to retain our window.
|
||||
break :prune;
|
||||
}
|
||||
|
||||
// We can now delete all the metas up to but NOT including
|
||||
// the meta we found through meta_it.
|
||||
meta_it = self.meta.iterator(.forward);
|
||||
var prune_data_len: usize = 0;
|
||||
for (0..prune_count) |_| {
|
||||
const meta = meta_it.next().?;
|
||||
prune_data_len += meta.cell_map.items.len;
|
||||
meta.deinit();
|
||||
}
|
||||
self.meta.deleteOldest(prune_count);
|
||||
self.data.deleteOldest(prune_data_len);
|
||||
}
|
||||
|
||||
// Our data offset now moves to needle.len - 1 from the end so
|
||||
// that we can handle the overlap case.
|
||||
self.data_offset = self.data.len() - self.needle.len + 1;
|
||||
|
||||
self.assertIntegrity();
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Return a selection for the given start and length into the data
|
||||
/// buffer and also prune the data/meta buffers if possible up to
|
||||
/// this start index.
|
||||
///
|
||||
/// The start index is assumed to be relative to the offset. i.e.
|
||||
/// index zero is actually at `self.data[self.data_offset]`. The
|
||||
/// selection will account for the offset.
|
||||
fn selection(
|
||||
self: *SlidingWindow,
|
||||
start_offset: usize,
|
||||
len: usize,
|
||||
) Selection {
|
||||
const start = start_offset + self.data_offset;
|
||||
assert(start < self.data.len());
|
||||
assert(start + len <= self.data.len());
|
||||
|
||||
// meta_consumed is the number of bytes we've consumed in the
|
||||
// data buffer up to and NOT including the meta where we've
|
||||
// found our pin. This is important because it tells us the
|
||||
// amount of data we can safely delete from self.data since
|
||||
// we can't partially delete a meta block's data. (The partial
|
||||
// amount is represented by self.data_offset).
|
||||
var meta_it = self.meta.iterator(.forward);
|
||||
var meta_consumed: usize = 0;
|
||||
const tl: Pin = pin(&meta_it, &meta_consumed, start);
|
||||
|
||||
// Store the information required to prune later. We store this
|
||||
// now because we only want to prune up to our START so we can
|
||||
// find overlapping matches.
|
||||
const tl_meta_idx = meta_it.idx - 1;
|
||||
const tl_meta_consumed = meta_consumed;
|
||||
|
||||
// We have to seek back so that we reinspect our current
|
||||
// iterator value again in case the start and end are in the
|
||||
// same segment.
|
||||
meta_it.seekBy(-1);
|
||||
const br: Pin = pin(&meta_it, &meta_consumed, start + len - 1);
|
||||
assert(meta_it.idx >= 1);
|
||||
|
||||
// Our offset into the current meta block is the start index
|
||||
// minus the amount of data fully consumed. We then add one
|
||||
// to move one past the match so we don't repeat it.
|
||||
self.data_offset = start - tl_meta_consumed + 1;
|
||||
|
||||
// meta_it.idx is br's meta index plus one (because the iterator
|
||||
// moves one past the end; we call next() one last time). So
|
||||
// we compare against one to check that the meta that we matched
|
||||
// in has prior meta blocks we can prune.
|
||||
if (tl_meta_idx > 0) {
|
||||
// Deinit all our memory in the meta blocks prior to our
|
||||
// match.
|
||||
const meta_count = tl_meta_idx;
|
||||
meta_it.reset();
|
||||
for (0..meta_count) |_| meta_it.next().?.deinit();
|
||||
if (comptime std.debug.runtime_safety) {
|
||||
assert(meta_it.idx == meta_count);
|
||||
assert(meta_it.next().?.node == tl.node);
|
||||
}
|
||||
self.meta.deleteOldest(meta_count);
|
||||
|
||||
// Delete all the data up to our current index.
|
||||
assert(tl_meta_consumed > 0);
|
||||
self.data.deleteOldest(tl_meta_consumed);
|
||||
}
|
||||
|
||||
self.assertIntegrity();
|
||||
return Selection.init(tl, br, false);
|
||||
}
|
||||
|
||||
/// Convert a data index into a pin.
|
||||
///
|
||||
/// The iterator and offset are both expected to be passed by
|
||||
/// pointer so that the pin can be efficiently called for multiple
|
||||
/// indexes (in order). See selection() for an example.
|
||||
///
|
||||
/// Precondition: the index must be within the data buffer.
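/// For example, if the first meta's cell map covers 10 bytes and `idx`
/// is 12, the result is entry 2 of the second meta's cell map and
/// `offset` is advanced to 10.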
|
||||
fn pin(
|
||||
it: *MetaBuf.Iterator,
|
||||
offset: *usize,
|
||||
idx: usize,
|
||||
) Pin {
|
||||
while (it.next()) |meta| {
|
||||
// meta_i is the index we expect to find the match in the
|
||||
// cell map within this meta if it contains it.
|
||||
const meta_i = idx - offset.*;
|
||||
if (meta_i >= meta.cell_map.items.len) {
|
||||
// This meta doesn't contain the match. This means we
|
||||
// can also prune this set of data because we only look
|
||||
// forward.
|
||||
offset.* += meta.cell_map.items.len;
|
||||
continue;
|
||||
}
|
||||
|
||||
// We found the meta that contains the start of the match.
|
||||
const map = meta.cell_map.items[meta_i];
|
||||
return .{
|
||||
.node = meta.node,
|
||||
.y = map.y,
|
||||
.x = map.x,
|
||||
};
|
||||
}
|
||||
|
||||
// Unreachable because it is a precondition that the index is
|
||||
// within the data buffer.
|
||||
unreachable;
|
||||
}
|
||||
|
||||
/// Add a new node to the sliding window. This will always grow
|
||||
/// the sliding window; data isn't pruned until it is consumed
|
||||
/// via a search (via next()).
|
||||
pub fn append(
|
||||
self: *SlidingWindow,
|
||||
alloc: Allocator,
|
||||
node: *PageList.List.Node,
|
||||
) Allocator.Error!void {
|
||||
// Initialize our metadata for the node.
|
||||
var meta: Meta = .{
|
||||
.node = node,
|
||||
.cell_map = Page.CellMap.init(alloc),
|
||||
};
|
||||
errdefer meta.deinit();
|
||||
|
||||
// This is suboptimal but we need to encode the page once to
|
||||
// temporary memory, and then copy it into our circular buffer.
|
||||
// In the future, we should benchmark and see if we can encode
|
||||
// directly into the circular buffer.
|
||||
var encoded: std.ArrayListUnmanaged(u8) = .{};
|
||||
defer encoded.deinit(alloc);
|
||||
|
||||
// Encode the page into the buffer.
|
||||
const page: *const Page = &meta.node.data;
|
||||
_ = page.encodeUtf8(
|
||||
encoded.writer(alloc),
|
||||
.{ .cell_map = &meta.cell_map },
|
||||
) catch {
|
||||
// writer uses anyerror but the only realistic error on
|
||||
// an ArrayList is out of memory.
|
||||
return error.OutOfMemory;
|
||||
};
|
||||
assert(meta.cell_map.items.len == encoded.items.len);
|
||||
|
||||
// Ensure our buffers are big enough to store what we need.
|
||||
try self.data.ensureUnusedCapacity(alloc, encoded.items.len);
|
||||
try self.meta.ensureUnusedCapacity(alloc, 1);
|
||||
|
||||
// Append our new node to the circular buffer.
|
||||
try self.data.appendSlice(encoded.items);
|
||||
try self.meta.append(meta);
|
||||
|
||||
self.assertIntegrity();
|
||||
}
|
||||
|
||||
fn assertIntegrity(self: *const SlidingWindow) void {
|
||||
if (comptime !std.debug.runtime_safety) return;
|
||||
|
||||
// Integrity check: verify our data matches our metadata exactly.
|
||||
var meta_it = self.meta.iterator(.forward);
|
||||
var data_len: usize = 0;
|
||||
while (meta_it.next()) |m| data_len += m.cell_map.items.len;
|
||||
assert(data_len == self.data.len());
|
||||
|
||||
// Integrity check: verify our data offset is within bounds.
|
||||
assert(self.data_offset < self.data.len());
|
||||
}
|
||||
};
|
||||
|
||||
test "PageListSearch single page" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 0);
|
||||
defer s.deinit();
|
||||
try s.testWriteString("hello. boo! hello. boo!");
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
|
||||
var search = try PageListSearch.init(alloc, &s.pages, "boo!");
|
||||
defer search.deinit(alloc);
|
||||
|
||||
// We should be able to find two matches.
|
||||
{
|
||||
const sel = (try search.next(alloc)).?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 7,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 10,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
{
|
||||
const sel = (try search.next(alloc)).?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 19,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 22,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect((try search.next(alloc)) == null);
|
||||
try testing.expect((try search.next(alloc)) == null);
|
||||
}
|
||||
|
||||
test "SlidingWindow empty on init" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "boo!");
|
||||
defer w.deinit(alloc);
|
||||
try testing.expectEqual(0, w.data.len());
|
||||
try testing.expectEqual(0, w.meta.len());
|
||||
}
|
||||
|
||||
test "SlidingWindow single append" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "boo!");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 0);
|
||||
defer s.deinit();
|
||||
try s.testWriteString("hello. boo! hello. boo!");
|
||||
|
||||
// We want to test single-page cases.
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
|
||||
// We should be able to find two matches.
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 7,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 10,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 19,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 22,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
}
|
||||
|
||||
test "SlidingWindow single append no match" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "nope!");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 0);
|
||||
defer s.deinit();
|
||||
try s.testWriteString("hello. boo! hello. boo!");
|
||||
|
||||
// We want to test single-page cases.
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
|
||||
// No matches
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
|
||||
// Should still keep the page
|
||||
try testing.expectEqual(1, w.meta.len());
|
||||
}
|
||||
|
||||
test "SlidingWindow two pages" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "boo!");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 1000);
|
||||
defer s.deinit();
|
||||
|
||||
// Fill up the first page. The final bytes in the first page
|
||||
// are "boo!"
|
||||
const first_page_rows = s.pages.pages.first.?.data.capacity.rows;
|
||||
for (0..first_page_rows - 1) |_| try s.testWriteString("\n");
|
||||
for (0..s.pages.cols - 4) |_| try s.testWriteString("x");
|
||||
try s.testWriteString("boo!");
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
try s.testWriteString("\n");
|
||||
try testing.expect(s.pages.pages.first != s.pages.pages.last);
|
||||
try s.testWriteString("hello. boo!");
|
||||
|
||||
// Add both pages
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node.next.?);
|
||||
|
||||
// Search should find two matches
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 76,
|
||||
.y = 22,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 79,
|
||||
.y = 22,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 7,
|
||||
.y = 23,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 10,
|
||||
.y = 23,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
}
|
||||
|
||||
test "SlidingWindow two pages match across boundary" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "hello, world");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 1000);
|
||||
defer s.deinit();
|
||||
|
||||
// Fill up the first page. The final bytes in the first page
|
||||
// are "boo!"
|
||||
const first_page_rows = s.pages.pages.first.?.data.capacity.rows;
|
||||
for (0..first_page_rows - 1) |_| try s.testWriteString("\n");
|
||||
for (0..s.pages.cols - 4) |_| try s.testWriteString("x");
|
||||
try s.testWriteString("hell");
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
try s.testWriteString("o, world!");
|
||||
try testing.expect(s.pages.pages.first != s.pages.pages.last);
|
||||
|
||||
// Add both pages
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node.next.?);
|
||||
|
||||
// Search should find a match
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 76,
|
||||
.y = 22,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 7,
|
||||
.y = 23,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
|
||||
// We shouldn't prune because we don't have enough space
|
||||
try testing.expectEqual(2, w.meta.len());
|
||||
}
|
||||
|
||||
test "SlidingWindow two pages no match prunes first page" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "nope!");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 1000);
|
||||
defer s.deinit();
|
||||
|
||||
// Fill up the first page. The final bytes in the first page
|
||||
// are "boo!"
|
||||
const first_page_rows = s.pages.pages.first.?.data.capacity.rows;
|
||||
for (0..first_page_rows - 1) |_| try s.testWriteString("\n");
|
||||
for (0..s.pages.cols - 4) |_| try s.testWriteString("x");
|
||||
try s.testWriteString("boo!");
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
try s.testWriteString("\n");
|
||||
try testing.expect(s.pages.pages.first != s.pages.pages.last);
|
||||
try s.testWriteString("hello. boo!");
|
||||
|
||||
// Add both pages
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node.next.?);
|
||||
|
||||
// Search should find nothing
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
|
||||
// We should've pruned our page because the second page
|
||||
// has enough text to contain our needle.
|
||||
try testing.expectEqual(1, w.meta.len());
|
||||
}
|
||||
|
||||
test "SlidingWindow two pages no match keeps both pages" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 1000);
|
||||
defer s.deinit();
|
||||
|
||||
// Fill up the first page. The final bytes in the first page
|
||||
// are "boo!"
|
||||
const first_page_rows = s.pages.pages.first.?.data.capacity.rows;
|
||||
for (0..first_page_rows - 1) |_| try s.testWriteString("\n");
|
||||
for (0..s.pages.cols - 4) |_| try s.testWriteString("x");
|
||||
try s.testWriteString("boo!");
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
try s.testWriteString("\n");
|
||||
try testing.expect(s.pages.pages.first != s.pages.pages.last);
|
||||
try s.testWriteString("hello. boo!");
|
||||
|
||||
// Imaginary needle for search. Doesn't match!
|
||||
var needle_list = std.ArrayList(u8).init(alloc);
|
||||
defer needle_list.deinit();
|
||||
try needle_list.appendNTimes('x', first_page_rows * s.pages.cols);
|
||||
const needle: []const u8 = needle_list.items;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, needle);
|
||||
defer w.deinit(alloc);
|
||||
|
||||
// Add both pages
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node.next.?);
|
||||
|
||||
// Search should find nothing
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expect(w.next() == null);
|
||||
|
||||
// No pruning because both pages are needed to fit needle.
|
||||
try testing.expectEqual(2, w.meta.len());
|
||||
}
|
||||
|
||||
test "SlidingWindow single append across circular buffer boundary" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "abc");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 0);
|
||||
defer s.deinit();
|
||||
try s.testWriteString("XXXXXXXXXXXXXXXXXXXboo!XXXXX");
|
||||
|
||||
// We are trying to break a circular buffer boundary so the way we
|
||||
// do this is to duplicate the data then do a failing search. This
|
||||
// will cause the first page to be pruned. The next time we append we'll
|
||||
// put it in the middle of the circ buffer. We assert this so that if
|
||||
// our implementation changes our test will fail.
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node);
|
||||
{
|
||||
// No wrap around yet
|
||||
const slices = w.data.getPtrSlice(0, w.data.len());
|
||||
try testing.expect(slices[0].len > 0);
|
||||
try testing.expect(slices[1].len == 0);
|
||||
}
|
||||
|
||||
// Search non-match, prunes page
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expectEqual(1, w.meta.len());
|
||||
|
||||
// Change the needle, just needs to be the same length (not a real API)
|
||||
w.needle = "boo";
|
||||
|
||||
// Add new page, now wraps
|
||||
try w.append(alloc, node);
|
||||
{
|
||||
const slices = w.data.getPtrSlice(0, w.data.len());
|
||||
try testing.expect(slices[0].len > 0);
|
||||
try testing.expect(slices[1].len > 0);
|
||||
}
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 19,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 21,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect(w.next() == null);
|
||||
}
|
||||
|
||||
test "SlidingWindow single append match on boundary" {
|
||||
const testing = std.testing;
|
||||
const alloc = testing.allocator;
|
||||
|
||||
var w = try SlidingWindow.init(alloc, "abcd");
|
||||
defer w.deinit(alloc);
|
||||
|
||||
var s = try Screen.init(alloc, 80, 24, 0);
|
||||
defer s.deinit();
|
||||
try s.testWriteString("o!XXXXXXXXXXXXXXXXXXXbo");
|
||||
|
||||
// We are trying to break a circular buffer boundary so the way we
|
||||
// do this is to duplicate the data then do a failing search. This
|
||||
// will cause the first page to be pruned. The next time we append we'll
|
||||
// put it in the middle of the circ buffer. We assert this so that if
|
||||
// our implementation changes our test will fail.
|
||||
try testing.expect(s.pages.pages.first == s.pages.pages.last);
|
||||
const node: *PageList.List.Node = s.pages.pages.first.?;
|
||||
try w.append(alloc, node);
|
||||
try w.append(alloc, node);
|
||||
{
|
||||
// No wrap around yet
|
||||
const slices = w.data.getPtrSlice(0, w.data.len());
|
||||
try testing.expect(slices[0].len > 0);
|
||||
try testing.expect(slices[1].len == 0);
|
||||
}
|
||||
|
||||
// Search non-match, prunes page
|
||||
try testing.expect(w.next() == null);
|
||||
try testing.expectEqual(1, w.meta.len());
|
||||
|
||||
// Change the needle, just needs to be the same length (not a real API)
|
||||
w.needle = "boo!";
|
||||
|
||||
// Add new page, now wraps
|
||||
try w.append(alloc, node);
|
||||
{
|
||||
const slices = w.data.getPtrSlice(0, w.data.len());
|
||||
try testing.expect(slices[0].len > 0);
|
||||
try testing.expect(slices[1].len > 0);
|
||||
}
|
||||
{
|
||||
const sel = w.next().?;
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 21,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.start()).?);
|
||||
try testing.expectEqual(point.Point{ .active = .{
|
||||
.x = 1,
|
||||
.y = 0,
|
||||
} }, s.pages.pointFromPin(.active, sel.end()).?);
|
||||
}
|
||||
try testing.expect(w.next() == null);
|
||||
}
|
@ -478,6 +478,18 @@ pub fn clearScreen(self: *Termio, td: *ThreadData, history: bool) !void {
|
||||
);
|
||||
}
|
||||
|
||||
// Clear all Kitty graphics state for this screen. This copies
|
||||
// Kitty's behavior when Cmd+K deletes all Kitty graphics. I
|
||||
// didn't spend time researching whether it only deletes Kitty
|
||||
// graphics that are placed above the cursor or if it deletes
|
||||
// all of them. We delete all of them for now but if this behavior
|
||||
// isn't fully correct we should fix this later.
|
||||
self.terminal.screen.kitty_images.delete(
|
||||
self.terminal.screen.alloc,
|
||||
&self.terminal,
|
||||
.{ .all = true },
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|