mirror of
https://github.com/ghostty-org/ghostty.git
synced 2025-08-02 14:57:31 +03:00
feat: generate json help strings for use in docs/website
This commit is contained in:
60
build.zig
60
build.zig
@ -124,6 +124,12 @@ pub fn build(b: *std.Build) !void {
|
||||
"Build and install the helpgen executable.",
|
||||
) orelse false;
|
||||
|
||||
// Build option to build/install the JSON help generator executable.
// Fix: the description previously said "helpgen" (copy/paste from the
// emit-helpgen option above); this option controls jsongen.
const emit_jsongen = b.option(
    bool,
    "emit-jsongen",
    "Build and install the jsongen executable.",
) orelse false;
|
||||
|
||||
const emit_docs = b.option(
|
||||
bool,
|
||||
"emit-docs",
|
||||
@ -146,7 +152,7 @@ pub fn build(b: *std.Build) !void {
|
||||
) orelse builtin.target.isDarwin() and
|
||||
target.result.os.tag == .macos and
|
||||
config.app_runtime == .none and
|
||||
(!emit_bench and !emit_test_exe and !emit_helpgen);
|
||||
(!emit_bench and !emit_test_exe and !emit_helpgen and !emit_jsongen);
|
||||
|
||||
// On NixOS, the built binary from `zig build` needs to patch the rpath
|
||||
// into the built binary for it to be portable across the NixOS system
|
||||
@ -215,6 +221,8 @@ pub fn build(b: *std.Build) !void {
|
||||
// otherwise the build will be cached without emit. That's clunky but meh.
|
||||
if (emit_helpgen) try addHelp(b, null, config);
|
||||
|
||||
if (emit_jsongen) try addJson(b, null, config);
|
||||
|
||||
// Add our benchmarks
|
||||
try benchSteps(b, target, config, emit_bench);
|
||||
|
||||
@ -1269,6 +1277,7 @@ fn addDeps(
|
||||
}
|
||||
|
||||
try addHelp(b, step, config);
|
||||
try addJson(b, step, config);
|
||||
try addUnicodeTables(b, step);
|
||||
|
||||
return static_libs;
|
||||
@ -1333,6 +1342,55 @@ fn addHelp(
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate the JSON help data by building and running the jsongen
/// executable (src/jsongen.zig). With a null step this only builds and
/// installs the generator binary itself (the emit-jsongen path); with a
/// compile step it installs the generated JSON file and exposes it to
/// the step as the anonymous import "json_help".
fn addJson(
    b: *std.Build,
    step_: ?*std.Build.Step.Compile,
    config: BuildConfig,
) !void {
    // Our static state between runs. We memoize the generated output
    // so that we only execute the JSON generation once, no matter how
    // many steps call addJson.
    const JsonState = struct {
        var generated: ?std.Build.LazyPath = null;
    };

    const json_output = JsonState.generated orelse strings: {
        const json_exe = b.addExecutable(.{
            .name = "jsongen",
            .root_source_file = b.path("src/jsongen.zig"),
            .target = b.host,
        });
        // No consuming step: we were asked to install the generator
        // executable itself.
        if (step_ == null) b.installArtifact(json_exe);

        // Same build config as the app, but with the entrypoint
        // switched so main.zig routes into jsongen.zig.
        const json_config = config: {
            var copy = config;
            copy.exe_entrypoint = .jsongen;
            break :config copy;
        };
        const options = b.addOptions();
        try json_config.addOptions(options);
        json_exe.root_module.addOptions("build_options", options);

        // The generator writes its JSON to stdout; capture that as a
        // lazy path usable by later steps.
        const json_run = b.addRunArtifact(json_exe);
        JsonState.generated = json_run.captureStdOut();
        if (step_) |_| {
            // Install the captured JSON into the shared docs dir.
            b.getInstallStep().dependOn(&b.addInstallFile(
                JsonState.generated.?,
                "share/ghostty/doc/ghostty-help.json",
            ).step);
        }

        break :strings JsonState.generated.?;
    };

    if (step_) |step| {
        // The step depends on generation and can @import("json_help")
        // to embed the produced file.
        json_output.addStepDependencies(&step.step);
        step.root_module.addAnonymousImport("json_help", .{
            .root_source_file = json_output,
        });
    }
}
|
||||
|
||||
/// Generate unicode fast lookup tables
|
||||
fn addUnicodeTables(
|
||||
b: *std.Build,
|
||||
|
@ -103,6 +103,10 @@ pub const flatpak = options.flatpak;
|
||||
pub const app_runtime: apprt.Runtime = config.app_runtime;
|
||||
pub const font_backend: font.Backend = config.font_backend;
|
||||
pub const renderer: rendererpkg.Impl = config.renderer;
|
||||
/// True when this binary is one of the documentation generators
/// (helpgen or jsongen) rather than the terminal itself. Used by the
/// config formatter code to skip docs-only output paths.
pub const building_docs = switch (exe_entrypoint) {
    .helpgen, .jsongen => true,
    else => false,
};
|
||||
|
||||
pub const Artifact = enum {
|
||||
/// Standalone executable
|
||||
@ -144,6 +148,7 @@ pub const Artifact = enum {
|
||||
pub const ExeEntrypoint = enum {
|
||||
ghostty,
|
||||
helpgen,
|
||||
jsongen,
|
||||
mdgen_ghostty_1,
|
||||
mdgen_ghostty_5,
|
||||
bench_parser,
|
||||
|
@ -12,6 +12,7 @@ const Config = @This();
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const build_config = @import("../build_config.zig");
|
||||
const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArenaAllocator = std.heap.ArenaAllocator;
|
||||
@ -3449,6 +3450,8 @@ pub const Keybinds = struct {
|
||||
|
||||
/// Like formatEntry but has an option to include docs.
|
||||
pub fn formatEntryDocs(self: Keybinds, formatter: anytype, docs: bool) !void {
|
||||
if (build_config.building_docs) return;
|
||||
|
||||
if (self.set.bindings.size == 0) {
|
||||
try formatter.formatEntry(void, {});
|
||||
return;
|
||||
|
@ -1,6 +1,7 @@
|
||||
const formatter = @This();
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const build_config = @import("../build_config.zig");
|
||||
const help_strings = @import("help_strings");
|
||||
const Config = @import("Config.zig");
|
||||
const Key = @import("key.zig").Key;
|
||||
@ -168,7 +169,7 @@ pub const FileFormatter = struct {
|
||||
} else true;
|
||||
|
||||
if (do_format) {
|
||||
const do_docs = self.docs and @hasDecl(help_strings.Config, field.name);
|
||||
const do_docs = !build_config.building_docs and self.docs and @hasDecl(help_strings.Config, field.name);
|
||||
if (do_docs) {
|
||||
const help = @field(help_strings.Config, field.name);
|
||||
var lines = std.mem.splitScalar(u8, help, '\n');
|
||||
|
385
src/jsongen.zig
Normal file
385
src/jsongen.zig
Normal file
@ -0,0 +1,385 @@
|
||||
//! This program is used to generate JSON data from the configuration file
//! and CLI actions for Ghostty. This can then be used to generate help, docs,
//! website, etc.

const std = @import("std");
const formatter = @import("config/formatter.zig");
const Config = @import("config/Config.zig");
const Action = @import("cli/action.zig").Action;
const KeybindAction = @import("input/Binding.zig").Action;

/// Root document that main() serializes to stdout as JSON.
pub const Help = struct {
    config: []ConfigInfo,
    actions: []ActionInfo,
    keybind_actions: []KeybindActionInfo,
    enums: []EnumInfo,
};

/// One configuration field: its name, extracted doc comment, type
/// name, and the formatted default value (null when none was found).
pub const ConfigInfo = struct {
    name: []const u8,
    help: ?[]const u8 = null,
    type: []const u8,
    default: ?[]const u8 = null,
};

/// One CLI action and the doc comment of its `run` function.
pub const ActionInfo = struct {
    name: []const u8,
    help: ?[]const u8 = null,
};

/// One keybind action: name, doc comment, and type name. `default` is
/// currently always null for keybind actions (see genKeybindActions)
/// but is kept for schema symmetry with ConfigInfo.
pub const KeybindActionInfo = struct {
    name: []const u8,
    help: ?[]const u8 = null,
    type: []const u8,
    default: ?[]const u8 = null,
};

/// An enum type referenced by a config field or keybind action,
/// keyed in output by its fully-qualified type name.
pub const EnumInfo = struct {
    name: []const u8,
    help: ?[]const u8,
    values: []EnumValue,
};

/// A single enum member and its doc comment.
pub const EnumValue = struct {
    value: []const u8,
    help: ?[]const u8,
};
|
||||
|
||||
/// Entrypoint: collect config fields, CLI actions, keybind actions,
/// and every referenced enum, then write the whole Help document as
/// pretty-printed JSON on stdout.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    // Everything below allocates from the arena, so one deinit at exit
    // frees it all; the per-list errdefers are best-effort cleanup.
    var arena = std.heap.ArenaAllocator.init(gpa.allocator());
    const alloc = arena.allocator();
    defer arena.deinit();

    const stdout = std.io.getStdOut().writer();

    var config_list = std.ArrayList(ConfigInfo).init(alloc);
    errdefer config_list.deinit();

    var enum_hash = std.StringArrayHashMap(EnumInfo).init(alloc);

    try genConfig(alloc, &config_list, &enum_hash);

    var actions_list = std.ArrayList(ActionInfo).init(alloc);
    // Fix: previously errdefer'd config_list again (copy/paste).
    errdefer actions_list.deinit();

    try genActions(alloc, &actions_list);

    var keybind_actions_list = std.ArrayList(KeybindActionInfo).init(alloc);
    // Fix: previously errdefer'd config_list again (copy/paste).
    errdefer keybind_actions_list.deinit();

    try genKeybindActions(alloc, &keybind_actions_list, &enum_hash);

    const j = Help{
        .config = try config_list.toOwnedSlice(),
        .actions = try actions_list.toOwnedSlice(),
        .keybind_actions = try keybind_actions_list.toOwnedSlice(),
        .enums = enum_hash.values(),
    };

    try std.json.stringify(j, .{ .whitespace = .indent_2 }, stdout);
}
|
||||
|
||||
/// Walk every field of Config at comptime, render its default value,
/// record enum value docs for enum-typed fields, and append one
/// ConfigInfo (name/help/type/default) entry per field to config_list.
fn genConfig(
    alloc: std.mem.Allocator,
    config_list: *std.ArrayList(ConfigInfo),
    enum_hash: *std.StringArrayHashMap(EnumInfo),
) !void {
    // Parse the Config source itself so genConfigField can recover the
    // doc comments, which are not available through @typeInfo.
    var ast = try std.zig.Ast.parse(alloc, @embedFile("config/Config.zig"), .zig);
    defer ast.deinit(alloc);

    inline for (@typeInfo(Config).Struct.fields) |field| {
        // Skip underscore-prefixed fields (internal by convention).
        if (field.name[0] == '_') continue;
        // Render the default through the same formatter used for
        // config files so docs match what a user would write;
        // "(none)" when the field has no default value.
        const default_value = d: {
            if (field.default_value) |dv| {
                // default_value is a type-erased pointer; cast it back
                // to the field's concrete type before formatting.
                const v: *const field.type = @ptrCast(@alignCast(dv));
                var l = std.ArrayList(u8).init(alloc);
                errdefer l.deinit();
                try formatter.formatEntry(
                    field.type,
                    field.name,
                    v.*,
                    l.writer(),
                );
                break :d try l.toOwnedSlice();
            }
            break :d "(none)";
        };
        // Enum-typed fields also get their values documented.
        if (@typeInfo(field.type) == .Enum) try genEnum(
            field.type,
            alloc,
            enum_hash,
        );
        try genConfigField(
            alloc,
            ConfigInfo,
            config_list,
            ast,
            field.name,
            @typeName(field.type),
            default_value,
        );
    }
}
|
||||
|
||||
/// For each CLI Action, parse its source file and extract the doc
/// comment on its `run` function into actions_list. Exits the process
/// with an error message if an action's run function is undocumented.
fn genActions(alloc: std.mem.Allocator, actions_list: *std.ArrayList(ActionInfo)) !void {
    inline for (@typeInfo(Action).Enum.fields) |field| {
        // Resolve the action's source file path at comptime.
        const action_file = comptime action_file: {
            const action = @field(Action, field.name);
            break :action_file action.file();
        };

        var ast = try std.zig.Ast.parse(alloc, @embedFile(action_file), .zig);
        defer ast.deinit(alloc);

        const tokens: []std.zig.Token.Tag = ast.tokens.items(.tag);

        for (tokens, 0..) |token, i| {
            // We're looking for a function named "run".
            if (token != .keyword_fn) continue;
            // Fix: guard the i + 1 lookahead so a trailing `fn` token
            // can't index past the end of the token list.
            if (i + 1 >= tokens.len) continue;
            if (!std.mem.eql(u8, ast.tokenSlice(@intCast(i + 1)), "run")) continue;

            // The function must be preceded by a doc comment. Two
            // tokens back assumes `pub fn run` (comment, pub, fn) —
            // TODO confirm for non-pub run functions. The i < 2 check
            // also fixes an index underflow when `fn` appears within
            // the first two tokens of the file.
            if (i < 2 or tokens[i - 2] != .doc_comment) {
                std.debug.print(
                    "doc comment must be present on run function of the {s} action!\n",
                    .{field.name},
                );
                std.process.exit(1);
            }

            const comment = try extractDocComments(alloc, ast, @intCast(i - 2), tokens);

            try actions_list.append(
                .{
                    .name = field.name,
                    .help = comment,
                },
            );

            // Only the first run function per file is considered.
            break;
        }
    }
}
|
||||
|
||||
/// Collect every keybind action (the fields of the KeybindAction
/// union) into keybind_actions_list, registering any enum-typed
/// payload in enum_hash. Keybind actions carry no default value,
/// hence the null default passed through.
fn genKeybindActions(
    alloc: std.mem.Allocator,
    keybind_actions_list: *std.ArrayList(KeybindActionInfo),
    enum_hash: *std.StringArrayHashMap(EnumInfo),
) !void {
    // Parse the declaring source once up front; genConfigField scans
    // its tokens for each field's doc comment.
    var binding_ast = try std.zig.Ast.parse(alloc, @embedFile("input/Binding.zig"), .zig);
    defer binding_ast.deinit(alloc);

    inline for (@typeInfo(KeybindAction).Union.fields) |field| {
        // Underscore-prefixed fields are skipped entirely.
        if (field.name[0] != '_') {
            if (@typeInfo(field.type) == .Enum) {
                try genEnum(field.type, alloc, enum_hash);
            }
            try genConfigField(
                alloc,
                KeybindActionInfo,
                keybind_actions_list,
                binding_ast,
                field.name,
                @typeName(field.type),
                null,
            );
        }
    }
}
|
||||
|
||||
/// Locate `field` in the given AST by scanning for an identifier that
/// is preceded by a doc comment, then append one T entry
/// (name/help/type/default) to `list`. Appends nothing when the field
/// has no doc comment in the source.
fn genConfigField(
    alloc: std.mem.Allocator,
    comptime T: type,
    list: *std.ArrayList(T),
    ast: std.zig.Ast,
    comptime field: []const u8,
    comptime type_name: []const u8,
    default_value: ?[]const u8,
) !void {
    const tokens = ast.tokens.items(.tag);
    for (tokens, 0..) |token, i| {
        // We only care about identifiers that are preceded by doc comments.
        if (token != .identifier) continue;
        // Fix: guard i == 0 before indexing tokens[i - 1] — the first
        // token can never have a preceding doc comment and the index
        // would underflow.
        if (i == 0) continue;
        if (tokens[i - 1] != .doc_comment) continue;

        // Identifier may have @"" so we strip that.
        const name = ast.tokenSlice(@intCast(i));
        const key = if (name[0] == '@') name[2 .. name.len - 1] else name;
        if (!std.mem.eql(u8, key, field)) continue;

        const comment = try extractDocComments(alloc, ast, @intCast(i - 1), tokens);
        try list.append(.{ .name = field, .help = comment, .type = type_name, .default = default_value });
        // First doc-commented match wins.
        break;
    }
}
|
||||
|
||||
/// Extract the contiguous doc-comment run ending at token `index` and
/// return it as newline-joined text with the common leading-space
/// prefix stripped. Caller owns the returned slice (allocated with
/// `alloc`). Returns an empty string when token `index` is not a doc
/// comment.
fn extractDocComments(
    alloc: std.mem.Allocator,
    ast: std.zig.Ast,
    index: std.zig.Ast.TokenIndex,
    tokens: []std.zig.Token.Tag,
) ![]const u8 {
    // Find the first index of the doc comments. The doc comments are
    // always stacked on top of each other so we can just go backwards.
    const start_idx: usize = start_idx: for (0..index) |i| {
        const reverse_i = index - i - 1;
        const token = tokens[reverse_i];
        if (token != .doc_comment) break :start_idx reverse_i + 1;
        // Fix: if every token back to the start of the file is a doc
        // comment (or index == 0), the run starts at token 0. This
        // was previously `unreachable`, which would crash on such
        // input (e.g. a file that opens with doc comments).
    } else 0;

    // Go through and build up the lines.
    var lines = std.ArrayList([]const u8).init(alloc);
    defer lines.deinit();
    for (start_idx..index + 1) |i| {
        const token = tokens[i];
        if (token != .doc_comment) break;
        // Strip the leading "///" (3 bytes) from each comment token.
        try lines.append(ast.tokenSlice(@intCast(i))[3..]);
    }

    // Join lines, dropping the shared leading-space indentation.
    var buffer = std.ArrayList(u8).init(alloc);
    const writer = buffer.writer();
    const prefix = findCommonPrefix(lines);
    for (lines.items) |line| {
        try writer.writeAll(line[@min(prefix, line.len)..]);
        try writer.writeAll("\n");
    }

    return buffer.toOwnedSlice();
}
|
||||
|
||||
/// Return the smallest count of leading spaces among the given lines,
/// ignoring lines that contain no non-space character (they do not
/// lower the minimum). Yields maxInt(usize) when no line has content;
/// callers clamp with @min against each line's length.
fn findCommonPrefix(lines: std.ArrayList([]const u8)) usize {
    var min_indent: usize = std.math.maxInt(usize);
    for (lines.items) |line| {
        // Index of the first non-space char, or skip all-space lines.
        const indent = std.mem.indexOfNone(u8, line, " ") orelse continue;
        min_indent = @min(min_indent, indent);
    }
    return min_indent;
}
|
||||
|
||||
/// Record the values (and their doc comments) of enum type T into
/// enum_map, keyed by the enum's fully-qualified type name. No-op if
/// the enum was already recorded; logs a warning and returns for
/// enums declared outside the three supported source files.
fn genEnum(comptime T: type, alloc: std.mem.Allocator, enum_map: *std.StringArrayHashMap(EnumInfo)) !void {
    const long_name = @typeName(T);

    // Memoize: several fields may share the same enum type.
    if (enum_map.contains(long_name)) return;

    // Map the type-name prefix to the source file that declares it;
    // only these three files are supported today.
    const source = s: {
        if (std.mem.startsWith(u8, long_name, "config.Config.")) break :s @embedFile("config/Config.zig");
        if (std.mem.startsWith(u8, long_name, "input.Binding.")) break :s @embedFile("input/Binding.zig");
        if (std.mem.startsWith(u8, long_name, "terminal.Screen.")) break :s @embedFile("terminal/Screen.zig");
        std.log.warn("unsupported enum {s}", .{long_name});
        return;
    };

    // Drop the first two name components (e.g. "config" and "Config");
    // the remaining components name nested declarations in the file.
    var it = std.mem.splitScalar(u8, long_name, '.');
    _ = it.next();
    _ = it.next();

    var ast = try std.zig.Ast.parse(alloc, source, .zig);
    defer ast.deinit(alloc);

    const tokens = ast.tokens.items(.tag);

    // NOTE(review): short_name is written in the loop below but never
    // read afterwards — candidate for removal.
    var short_name: []const u8 = "";
    var start: std.zig.Ast.TokenIndex = 0;
    var end: std.zig.Ast.TokenIndex = @intCast(tokens.len);

    // Narrow the [start, end) token window component by component
    // until it covers just the enum declaration's body.
    while (it.next()) |s| {
        const e = findDefinition(ast, tokens, s, start, end) orelse {
            // long_name is comptime-known, so `++` concatenation works.
            @panic("can't find " ++ long_name);
        };
        short_name = s;
        start = e.start;
        end = e.end;
    }

    // The enum's own doc comment is assumed to sit two tokens before
    // the body start — TODO confirm this token-layout assumption.
    const comment = if (start >= 2) try extractDocComments(alloc, ast, @intCast(start - 2), tokens) else null;

    var values = std.ArrayList(EnumValue).init(alloc);
    errdefer values.deinit();

    // Scan the body for member declarations: `name,` or the
    // explicitly numbered `name = <int>,` form.
    // NOTE(review): the j+1..j+3 lookaheads rely on tokens existing
    // past the window end; confirm the window never abuts the final
    // token of the file.
    for (tokens[start..end], start..) |token, j| {
        if (token != .identifier) continue;
        switch (tokens[j + 1]) {
            .equal => {
                if (tokens[j + 2] != .number_literal) continue;
                if (tokens[j + 3] != .comma) continue;
            },
            .comma => {},
            else => continue,
        }

        // Strip @"" quoting from the member name if present.
        const value_name = ast.tokenSlice(@intCast(j));
        const value_key = if (value_name[0] == '@') value_name[2 .. value_name.len - 1] else value_name;
        // If the member has no doc comment, this yields an empty
        // string rather than null.
        const value_comment = try extractDocComments(alloc, ast, @intCast(j - 1), tokens);
        try values.append(
            .{
                .value = value_key,
                .help = value_comment,
            },
        );
    }

    try enum_map.put(
        long_name,
        .{
            .name = long_name,
            .help = comment,
            .values = try values.toOwnedSlice(),
        },
    );
}
|
||||
|
||||
/// Search tokens[start..end] for a `pub const <name> = ... { ... }`
/// declaration and return the token window of its braced body, or
/// null when the name is not found in the window.
fn findDefinition(
    ast: std.zig.Ast,
    tokens: []std.zig.Token.Tag,
    name: []const u8,
    start: std.zig.Ast.TokenIndex,
    end: std.zig.Ast.TokenIndex,
) ?struct {
    // Token index of the matched identifier itself.
    identifier: std.zig.Ast.TokenIndex,
    // First token after the opening brace of the body.
    start: std.zig.Ast.TokenIndex,
    // NOTE(review): end is j - 1 where j is the matching close brace,
    // so callers slicing tokens[start..end] stop one token before the
    // closing brace — confirm this off-by-one is intentional.
    end: std.zig.Ast.TokenIndex,
} {
    for (tokens[start..end], start..) |token, i| {
        if (token != .identifier) continue;

        // Need room for the `pub const` tokens before the identifier.
        if (i < 2) continue;

        const identifier: std.zig.Ast.TokenIndex = @intCast(i);

        // Must match the exact shape: pub const <name> =
        // NOTE(review): the i + 1 lookahead assumes an identifier is
        // never the file's final token — holds for well-formed source.
        if (tokens[i - 2] != .keyword_pub) continue;
        if (tokens[i - 1] != .keyword_const) continue;
        if (tokens[i + 1] != .equal) continue;

        if (!std.mem.eql(u8, name, ast.tokenSlice(identifier))) continue;

        // Find the opening brace of the declaration's body; bail if
        // none appears before the window ends.
        const start_brace: std.zig.Ast.TokenIndex = s: {
            for (tokens[i..end], i..) |t, j| {
                if (t == .l_brace) break :s @intCast(j);
            }
            return null;
        };

        // Walk forward balancing braces until the body closes (depth
        // returns to zero at the matching r_brace).
        var depth: usize = 0;

        for (tokens[start_brace..], start_brace..) |tok, j| {
            if (tok == .l_brace) depth += 1;
            if (tok == .r_brace) depth -= 1;
            if (depth == 0) {
                return .{
                    .identifier = identifier,
                    .start = start_brace + 1,
                    .end = @intCast(j - 1),
                };
            }
        }
    }
    return null;
}
|
@ -4,6 +4,7 @@ const build_config = @import("build_config.zig");
|
||||
const entrypoint = switch (build_config.exe_entrypoint) {
|
||||
.ghostty => @import("main_ghostty.zig"),
|
||||
.helpgen => @import("helpgen.zig"),
|
||||
.jsongen => @import("jsongen.zig"),
|
||||
.mdgen_ghostty_1 => @import("build/mdgen/main_ghostty_1.zig"),
|
||||
.mdgen_ghostty_5 => @import("build/mdgen/main_ghostty_5.zig"),
|
||||
.bench_parser => @import("bench/parser.zig"),
|
||||
|
Reference in New Issue
Block a user