replace deprecated std.mem.tokenize with std.mem.tokenizeScalar

Author: Jeffrey C. Ollie
Date:   2024-01-03 10:22:33 -06:00
parent 5091655558
commit f4292bccfc
3 changed files with 3 additions and 3 deletions
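
Note (not part of the commit): std.mem.tokenize is deprecated in favor of std.mem.tokenizeScalar (single-item delimiter), std.mem.tokenizeAny (set of delimiters), and std.mem.tokenizeSequence (multi-item delimiter). Every call site touched below splits on exactly one delimiter, so tokenizeScalar is the direct replacement; the delimiter argument changes from a one-element slice (e.g. "+" or &[_]u8{std.fs.path.delimiter}) to a scalar (e.g. '+'). A minimal illustrative sketch of the new API:

const std = @import("std");

// Illustrative test only: tokenizeScalar takes the delimiter as a single
// scalar value instead of a slice of delimiter bytes.
test "tokenizeScalar splits on a single delimiter" {
    var it = std.mem.tokenizeScalar(u8, "/usr/bin:/usr/local/bin::/bin", ':');
    try std.testing.expectEqualStrings("/usr/bin", it.next().?);
    try std.testing.expectEqualStrings("/usr/local/bin", it.next().?);
    // Like tokenize, empty tokens between consecutive delimiters are skipped.
    try std.testing.expectEqualStrings("/bin", it.next().?);
    try std.testing.expect(it.next() == null);
}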

@@ -371,7 +371,7 @@ pub fn expandPath(alloc: Allocator, cmd: []const u8) !?[]u8 {
     defer if (builtin.os.tag == .windows) alloc.free(PATH);
     var path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
-    var it = std.mem.tokenize(u8, PATH, &[_]u8{std.fs.path.delimiter});
+    var it = std.mem.tokenizeScalar(u8, PATH, std.fs.path.delimiter);
     var seen_eacces = false;
     while (it.next()) |search_path| {
         // We need enough space in our path buffer to store this

@@ -45,7 +45,7 @@ pub fn parse(raw_input: []const u8) !Binding {
     // the "=", i.e. "ctrl+shift+a" or "a"
     const trigger = trigger: {
         var result: Trigger = .{};
-        var iter = std.mem.tokenize(u8, input[0..eqlIdx], "+");
+        var iter = std.mem.tokenizeScalar(u8, input[0..eqlIdx], '+');
         loop: while (iter.next()) |part| {
             // All parts must be non-empty
             if (part.len == 0) return Error.InvalidFormat;

@@ -2322,7 +2322,7 @@ pub fn selectionString(
     // doing this in the loop above but this isn't very hot path code and
     // this is simple.
     if (trim) {
-        var it = std.mem.tokenize(u8, strbuilder.items, "\n");
+        var it = std.mem.tokenizeScalar(u8, strbuilder.items, '\n');
         // Reset our items. We retain our capacity. Because we're only
         // removing bytes, we know that the trimmed string must be no longer