terminal/kitty_graphics: ignore base64 padding
Also move all base64 decoding inside the command parser.
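The core of the change, as a minimal standalone sketch (the function name and test strings below are illustrative, not code from this commit): trailing '=' characters are stripped before decoding with the no-padding decoder, so a payload decodes the same whether or not the encoder added padding.

const std = @import("std");

/// Decode a base64 payload regardless of whether it carries trailing
/// '=' padding: strip any padding, then use the no-pad decoder.
fn decodeIgnoringPadding(dest: []u8, payload: []const u8) ![]const u8 {
    var trimmed = payload;
    while (trimmed.len > 0 and trimmed[trimmed.len - 1] == '=') {
        trimmed = trimmed[0 .. trimmed.len - 1];
    }

    const Decoder = std.base64.standard_no_pad.Decoder;
    const size = try Decoder.calcSizeForSlice(trimmed);
    if (size > dest.len) return error.NoSpaceLeft;
    try Decoder.decode(dest[0..size], trimmed);
    return dest[0..size];
}

test "padded and unpadded payloads decode identically" {
    var buf: [16]u8 = undefined;
    try std.testing.expectEqualStrings("hi", try decodeIgnoringPadding(&buf, "aGk="));
    try std.testing.expectEqualStrings("hi", try decodeIgnoringPadding(&buf, "aGk"));
}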
@@ -3,6 +3,8 @@ const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
 
+const log = std.log.scoped(.kitty_gfx);
+
 /// The key-value pairs for the control information for a command. The
 /// keys are always single characters and the values are either single
 /// characters or 32-bit unsigned integers.
@@ -165,12 +167,45 @@ pub const CommandParser = struct {
         return .{
             .control = control,
             .quiet = quiet,
-            .data = if (self.data.items.len == 0) "" else data: {
-                break :data try self.data.toOwnedSlice();
-            },
+            .data = try self.decodeData(),
         };
     }
 
+    /// Decodes the payload data from base64 and returns it as a slice.
+    /// This function will destroy the contents of self.data, it should
+    /// only be used once we are done collecting payload bytes.
+    fn decodeData(self: *CommandParser) ![]const u8 {
+        if (self.data.items.len == 0) {
+            return "";
+        }
+
+        const Base64Decoder = std.base64.standard_no_pad.Decoder;
+
+        // We remove any padding, since it's optional, and decode without it.
+        while (self.data.items[self.data.items.len - 1] == '=') {
+            self.data.items.len -= 1;
+        }
+
+        const size = Base64Decoder.calcSizeForSlice(self.data.items) catch |err| {
+            log.warn("failed to calculate base64 size for payload: {}", .{err});
+            return error.InvalidData;
+        };
+
+        // This is kinda cursed, but we can decode the base64 on top of
+        // itself, since it's guaranteed that the encoded size is larger,
+        // and any bytes in areas that are written to will have already
+        // been used (assuming scalar decoding).
+        Base64Decoder.decode(self.data.items[0..size], self.data.items) catch |err| {
+            log.warn("failed to decode base64 payload data: {}", .{err});
+            return error.InvalidData;
+        };
+
+        // Remove the extra bytes.
+        self.data.items.len = size;
+
+        return try self.data.toOwnedSlice();
+    }
+
     fn accumulateValue(self: *CommandParser, c: u8, overflow_state: State) !void {
         const idx = self.kv_temp_len;
         self.kv_temp_len += 1;
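A note on the in-place decode above: base64 maps every four encoded characters to at most three bytes, so the size that calcSizeForSlice reports never exceeds the length of the encoded input; combined with the decoder consuming each input group before writing its output (the "assuming scalar decoding" caveat in the comment), the write never overtakes the read. A small standalone check of that size relationship, with made-up sample strings:

const std = @import("std");

test "base64 decoded size never exceeds the encoded size" {
    const Decoder = std.base64.standard_no_pad.Decoder;
    // Unpadded encodings of "A", "AB", "ABC", and "ABCDEFG".
    const samples = [_][]const u8{ "QQ", "QUI", "QUJD", "QUJDREVGRw" };
    for (samples) |encoded| {
        const decoded_len = try Decoder.calcSizeForSlice(encoded);
        try std.testing.expect(decoded_len <= encoded.len);
    }
}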
@@ -5,6 +5,7 @@ const Allocator = std.mem.Allocator;
 const ArenaAllocator = std.heap.ArenaAllocator;
 const posix = std.posix;
 
+const fastmem = @import("../../fastmem.zig");
 const command = @import("graphics_command.zig");
 const point = @import("../point.zig");
 const PageList = @import("../PageList.zig");
@@ -56,30 +57,16 @@ pub const LoadingImage = struct {
             .display = cmd.display(),
         };
 
-        // Special case for the direct medium, we just add it directly
-        // which will handle copying the data, base64 decoding, etc.
+        // Special case for the direct medium, we just add the chunk directly.
         if (t.medium == .direct) {
            try result.addData(alloc, cmd.data);
            return result;
        }
 
-        // For every other medium, we'll need to at least base64 decode
-        // the data to make it useful so let's do that. Also, all the data
-        // has to be path data so we can put it in a stack-allocated buffer.
-        var buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-        const Base64Decoder = std.base64.standard.Decoder;
-        const size = Base64Decoder.calcSizeForSlice(cmd.data) catch |err| {
-            log.warn("failed to calculate base64 size for file path: {}", .{err});
-            return error.InvalidData;
-        };
-        if (size > buf.len) return error.FilePathTooLong;
-        Base64Decoder.decode(&buf, cmd.data) catch |err| {
-            log.warn("failed to decode base64 data: {}", .{err});
-            return error.InvalidData;
-        };
+        // Otherwise, the payload data is guaranteed to be a path.
 
         if (comptime builtin.os.tag != .windows) {
-            if (std.mem.indexOfScalar(u8, buf[0..size], 0) != null) {
+            if (std.mem.indexOfScalar(u8, cmd.data, 0) != null) {
                 // posix.realpath *asserts* that the path does not have
                 // internal nulls instead of erroring.
                 log.warn("failed to get absolute path: BadPathName", .{});
@@ -88,7 +75,7 @@ pub const LoadingImage = struct {
         }
 
         var abs_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-        const path = posix.realpath(buf[0..size], &abs_buf) catch |err| {
+        const path = posix.realpath(cmd.data, &abs_buf) catch |err| {
             log.warn("failed to get absolute path: {}", .{err});
             return error.InvalidData;
         };
@@ -229,42 +216,25 @@ pub const LoadingImage = struct {
         alloc.destroy(self);
     }
 
-    /// Adds a chunk of base64-encoded data to the image. Use this if the
-    /// image is coming in chunks (the "m" parameter in the protocol).
+    /// Adds a chunk of data to the image. Use this if the image
+    /// is coming in chunks (the "m" parameter in the protocol).
     pub fn addData(self: *LoadingImage, alloc: Allocator, data: []const u8) !void {
         // If no data, skip
         if (data.len == 0) return;
 
-        // Grow our array list by size capacity if it needs it
-        const Base64Decoder = std.base64.standard.Decoder;
-        const size = Base64Decoder.calcSizeForSlice(data) catch |err| {
-            log.warn("failed to calculate size for base64 data: {}", .{err});
-            return error.InvalidData;
-        };
-
         // If our data would get too big, return an error
-        if (self.data.items.len + size > max_size) {
+        if (self.data.items.len + data.len > max_size) {
             log.warn("image data too large max_size={}", .{max_size});
             return error.InvalidData;
         }
 
-        try self.data.ensureUnusedCapacity(alloc, size);
+        // Ensure we have enough room to add the data
+        // to the end of the ArrayList before doing so.
+        try self.data.ensureUnusedCapacity(alloc, data.len);
 
-        // We decode directly into the arraylist
         const start_i = self.data.items.len;
-        self.data.items.len = start_i + size;
-        const buf = self.data.items[start_i..];
-        Base64Decoder.decode(buf, data) catch |err| switch (err) {
-            // We have to ignore invalid padding because lots of encoders
-            // add the wrong padding. Since we validate image data later
-            // (PNG decode or simple dimensions check), we can ignore this.
-            error.InvalidPadding => {},
-
-            else => {
-                log.warn("failed to decode base64 data: {}", .{err});
-                return error.InvalidData;
-            },
-        };
+        self.data.items.len = start_i + data.len;
+        fastmem.copy(u8, self.data.items[start_i..], data);
     }
 
     /// Complete the chunked image, returning a completed image.
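Taken together, the two files now divide the work cleanly: the command parser decodes each command's payload exactly once (padding or not), and the image loader only appends the already-decoded bytes under a size cap, never touching base64 at all. A rough standalone model of that division; decodePayload, addData, the max_size value, and the test chunks are illustrative stand-ins, not ghostty's actual API:

const std = @import("std");

// Illustrative cap for this sketch; ghostty's real max_size is defined elsewhere.
const max_size = 4 * 1024 * 1024;

/// Stand-in for the parser side: decode one command payload, padded or not.
fn decodePayload(alloc: std.mem.Allocator, payload: []const u8) ![]u8 {
    var trimmed = payload;
    while (trimmed.len > 0 and trimmed[trimmed.len - 1] == '=') {
        trimmed = trimmed[0 .. trimmed.len - 1];
    }
    const Decoder = std.base64.standard_no_pad.Decoder;
    const out = try alloc.alloc(u8, try Decoder.calcSizeForSlice(trimmed));
    try Decoder.decode(out, trimmed);
    return out;
}

/// Stand-in for the image side: append already-decoded bytes, no base64 here.
fn addData(list: *std.ArrayListUnmanaged(u8), alloc: std.mem.Allocator, data: []const u8) !void {
    if (data.len == 0) return;
    if (list.items.len + data.len > max_size) return error.InvalidData;
    try list.appendSlice(alloc, data);
}

test "chunks are decoded by the parser and appended raw by the loader" {
    const alloc = std.testing.allocator;
    var image: std.ArrayListUnmanaged(u8) = .{};
    defer image.deinit(alloc);

    // One base64 stream ("aGVsbG8=" == "hello") split on a 4-character boundary,
    // as a chunked transmission would deliver it.
    const chunks = [_][]const u8{ "aGVs", "bG8=" };
    for (chunks) |chunk| {
        const decoded = try decodePayload(alloc, chunk);
        defer alloc.free(decoded);
        try addData(&image, alloc, decoded);
    }
    try std.testing.expectEqualStrings("hello", image.items);
}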