Gregory Mullen · parent d82ba323 · 69f7abd0
update to zig v0.12

well v0.12-ish :D

added: 233, removed: 227, total 6

build.zig
@@ -14,7 +14,7 @@ pub fn build(b: *std.Build) void {
);
 
const log = b.createModule(.{
.source_file = .{ .path = "src/log.zig" },
.root_source_file = .{ .path = "src/log.zig" },
});
 
const exe = b.addExecutable(.{
@@ -24,8 +24,8 @@ pub fn build(b: *std.Build) void {
.optimize = optimize,
});
 
exe.addModule("log", log);
exe.addOptions("hsh_build", opts);
exe.root_module.addOptions("hsh_build", opts);
exe.root_module.addImport("log", log);
 
b.installArtifact(exe);
 
@@ -48,8 +48,8 @@ pub fn build(b: *std.Build) void {
.target = target,
.optimize = optimize,
});
unit_tests.addOptions("hsh_build", opts);
unit_tests.addModule("log", log);
unit_tests.root_module.addOptions("hsh_build", opts);
unit_tests.root_module.addImport("log", log);
const run_tests = b.addRunArtifact(unit_tests);
 
const test_step = b.step("test", "Run unit tests");
@@ -63,7 +63,7 @@ fn version(b: *std.Build) []const u8 {
}
 
var code: u8 = undefined;
var git_wide = b.execAllowFail(&[_][]const u8{
const git_wide = b.runAllowFail(&[_][]const u8{
"git",
"describe",
"--dirty",
 
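For reference, the 0.12 build-graph idiom these hunks move to looks roughly like the sketch below. The module names "log" and "hsh_build" come from the diff; the executable name, option name, and option value are placeholders, not the commit's exact build.zig. The version() hunk is the same story in miniature: b.execAllowFail is simply renamed b.runAllowFail.

const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // 0.12: a module is described by its root source file.
    const log = b.createModule(.{
        .root_source_file = .{ .path = "src/log.zig" },
    });

    const opts = b.addOptions();
    opts.addOption([]const u8, "version", "v0.12-ish"); // placeholder option

    const exe = b.addExecutable(.{
        .name = "hsh",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // 0.12: options and imports attach to exe.root_module, not the step itself.
    exe.root_module.addOptions("hsh_build", opts);
    exe.root_module.addImport("log", log);

    b.installArtifact(exe);
}
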
src/builtins/alias.zig
@@ -65,7 +65,7 @@ pub fn alias_core(a: std.mem.Allocator, titr: *ParsedIterator) Err!u8 {
 
var name: ?[]const u8 = null;
var value: ?[]const u8 = null;
var mode: ?[]const u8 = null;
const mode: ?[]const u8 = null;
while (titr.next()) |t| {
if (name) |_| {
value = t.cannon();
@@ -136,7 +136,7 @@ fn replace(a: std.mem.Allocator, key: []const u8, val: []const u8) !bool {
fn del(src: []const u8) Err!void {
for (aliases.items, 0..) |a, i| {
if (std.mem.eql(u8, src, a.name)) {
var d = aliases.swapRemove(i);
const d = aliases.swapRemove(i);
aliases.allocator.free(d.name);
aliases.allocator.free(d.value);
return;
@@ -145,7 +145,7 @@ fn del(src: []const u8) Err!void {
}
 
test "alias" {
var a = std.testing.allocator;
const a = std.testing.allocator;
init(a);
defer raze(a);
 
@@ -159,7 +159,7 @@ test "save" {
const str = "alias haxzor='ssh 127.0.0.1 \"echo hsh was here | sudo tee /root/.lmao.txt\"'";
 
var itr = tokenizer.TokenIterator{ .raw = str };
var slice = try itr.toSliceExec(a);
const slice = try itr.toSliceExec(a);
defer a.free(slice);
var pitr = try Parse.Parser.parse(a, slice);
 
 
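Most of the churn in this hunk (and across the commit) is mechanical: Zig 0.12 rejects a `var` that is never reassigned with "local variable is never mutated", so such locals become `const`. A tiny illustrative sketch of the rule, not taken from the commit:

const std = @import("std");

test "never-mutated locals must be const in 0.12" {
    const a = std.testing.allocator; // 0.11 tolerated `var a`; 0.12 makes it a compile error
    const buf = try a.alloc(u8, 8); // only read afterwards, so it must be const too
    defer a.free(buf);

    var n: usize = 0; // still `var`, because it really is mutated
    for (buf) |_| n += 1;
    try std.testing.expectEqual(buf.len, n);
}
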
src/builtins/export.zig
@@ -82,8 +82,8 @@ pub fn exports(h: *HSH, pitr: *ParsedIterator) Err!u8 {
 
if (std.mem.indexOf(u8, name.?.cannon(), "=")) |_| {
var keyitr = std.mem.split(u8, name.?.cannon(), "=");
var key = keyitr.first();
var value = keyitr.rest();
const key = keyitr.first();
const value = keyitr.rest();
// TODO push into variables
add(key, value) catch {
log.err("", .{});
@@ -92,8 +92,8 @@ pub fn exports(h: *HSH, pitr: *ParsedIterator) Err!u8 {
return 0;
} else {
// no = in the string, so it needs to already exist within variables.
var key = h.alloc.dupe(u8, name.?.cannon()) catch return Err.Memory;
var value = Variables.getStr(key) orelse {
const key = h.alloc.dupe(u8, name.?.cannon()) catch return Err.Memory;
const value = Variables.getStr(key) orelse {
log.err("Attempted to export an non-existant name\n", .{});
return 1;
};
 
src/builtins/set.zig
@@ -142,7 +142,7 @@ fn option(_: std.mem.Allocator, opt: []const u8, titr: *ParsedIterator) Err!u8 {
fn dump() Err!u8 {
inline for (@typeInfo(PosixOpts).Enum.fields) |o| {
const name = o.name;
var truthy = if (Vars.getKind(name, .internal)) |str|
const truthy = if (Vars.getKind(name, .internal)) |str|
std.mem.eql(u8, "true", str.str)
else
false;
@@ -188,7 +188,7 @@ pub fn set(h: *HSH, titr: *ParsedIterator) Err!u8 {
 
test "set" {
const Parse = @import("../parse.zig");
var a = std.testing.allocator;
const a = std.testing.allocator;
Vars.init(a);
defer Vars.raze();
 
 
src/builtins/which.zig
@@ -10,7 +10,7 @@ var path: [2048]u8 = undefined;
 
fn executable(str: []const u8) ?[]const u8 {
var fba = std.heap.FixedBufferAllocator.init(&path);
var a = fba.allocator();
const a = fba.allocator();
return Exec.makeAbsExecutable(a, str) catch return null;
}
 
 
src/completion.zig
@@ -3,7 +3,7 @@ const ArrayList = std.ArrayList;
const Allocator = std.mem.Allocator;
const HSH = @import("hsh.zig").HSH;
const fs = @import("fs.zig");
const IterableDir = std.fs.IterableDir;
const Dir = std.fs.Dir;
const Tokenizer = @import("tokenizer.zig").Tokenizer;
const Token = @import("token.zig");
const Parser = @import("parse.zig").Parser;
@@ -44,7 +44,7 @@ pub const FSKind = enum {
};
}
 
pub fn fromFsKind(k: std.fs.IterableDir.Entry.Kind) FSKind {
pub fn fromFsKind(k: std.fs.Dir.Entry.Kind) FSKind {
return switch (k) {
.file => .file,
.directory => .dir,
@@ -336,8 +336,8 @@ pub const CompSet = struct {
pub fn drawGroup(self: *CompSet, f: Flavors, d: *Draw.Drawable, wh: Cord) !void {
//defer list.clearAndFree();
const g_int = @intFromEnum(f);
var group = &self.groups[g_int];
var current_group = g_int == self.group_index;
const group = &self.groups[g_int];
const current_group = g_int == self.group_index;
 
if (group.items.len == 0) return;
 
@@ -373,14 +373,14 @@ pub const CompSet = struct {
 
pub fn drawGroupBuild(self: *CompSet, f: Flavors, d: *Draw.Drawable, wh: Cord) !void {
const g_int = @intFromEnum(f);
var group = &self.groups[g_int];
const group = &self.groups[g_int];
 
var list = ArrayList(Draw.Lexeme).init(self.alloc);
for (group.items) |itm| {
const lex = itm.lexeme(false);
list.append(lex) catch break;
}
var items = try list.toOwnedSlice();
const items = try list.toOwnedSlice();
if (Draw.Layout.table(self.alloc, items, wh)) |trees| {
self.draw_cache[g_int] = trees;
} else |err| {
@@ -492,7 +492,7 @@ pub const CompSet = struct {
}
};
 
fn completeDir(cs: *CompSet, cwdi: IterableDir) !void {
fn completeDir(cs: *CompSet, cwdi: Dir) !void {
var itr = cwdi.iterate();
cs.original = CompOption{ .str = try cs.alloc.dupe(u8, ""), .kind = null };
while (try itr.next()) |each| {
@@ -505,7 +505,7 @@ fn completeDir(cs: *CompSet, cwdi: IterableDir) !void {
}
}
 
fn completeDirBase(cs: *CompSet, cwdi: IterableDir, base: []const u8) !void {
fn completeDirBase(cs: *CompSet, cwdi: Dir, base: []const u8) !void {
var itr = cwdi.iterate();
cs.original = CompOption{ .str = try cs.alloc.dupe(u8, base), .kind = null };
while (try itr.next()) |each| {
@@ -523,18 +523,18 @@ fn completePath(cs: *CompSet, _: *HSH, target: []const u8) !void {
if (target.len < 1) return;
 
var whole = std.mem.splitBackwards(u8, target, "/");
var base = whole.first();
var path = whole.rest();
const base = whole.first();
const path = whole.rest();
 
var dir: std.fs.IterableDir = undefined;
var dir: std.fs.Dir = undefined;
if (target[0] == '/') {
if (path.len == 0) {
dir = std.fs.openIterableDirAbsolute("/", .{}) catch return;
dir = std.fs.openDirAbsolute("/", .{}) catch return;
} else {
dir = std.fs.openIterableDirAbsolute(path, .{}) catch return;
dir = std.fs.openDirAbsolute(path, .{}) catch return;
}
} else {
dir = std.fs.cwd().openIterableDir(path, .{}) catch return;
dir = std.fs.cwd().openDir(path, .{}) catch return;
}
defer dir.close();
 
@@ -561,14 +561,14 @@ fn completeSysPath(cs: *CompSet, h: *HSH, target: []const u8) !void {
cs.original = CompOption{ .str = try cs.alloc.dupe(u8, target), .kind = null };
 
for (h.hfs.names.paths.items) |path| {
var dir = std.fs.openIterableDirAbsolute(path, .{}) catch return;
var dir = std.fs.openDirAbsolute(path, .{ .iterate = true }) catch return;
defer dir.close();
var itr = dir.iterate();
while (try itr.next()) |each| {
if (!std.mem.startsWith(u8, each.name, target)) continue;
if (each.name[0] == '.' and (target.len == 0 or target[0] != '.')) continue;
if (each.kind != .file) continue; // TODO probably a bug
const file = fs.openFileAt(dir.dir, each.name, false) orelse continue;
const file = fs.openFileAt(dir, each.name, false) orelse continue;
defer file.close();
if (file.metadata()) |md| {
if (!md.permissions().inner.unixHas(
@@ -638,7 +638,7 @@ pub fn complete(cs: *CompSet, hsh: *HSH, tks: *Tokenizer) !void {
else => {
switch (pair.t.kind) {
.ws => {
var dir = try std.fs.cwd().openIterableDir(".", .{});
var dir = try std.fs.cwd().openDir(".", .{});
defer dir.close();
try completeDir(cs, dir);
},
@@ -647,7 +647,7 @@ pub fn complete(cs: *CompSet, hsh: *HSH, tks: *Tokenizer) !void {
if (std.mem.indexOfScalar(u8, t.cannon(), '/')) |_| {
try completePath(cs, hsh, t.cannon());
} else {
var dir = try std.fs.cwd().openIterableDir(".", .{});
var dir = try std.fs.cwd().openDir(".", .{ .iterate = true });
defer dir.close();
try completeDirBase(cs, dir, t.cannon());
}
 
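Most of the completion changes stem from 0.12 folding std.fs.IterableDir back into std.fs.Dir: directories that will be iterated are opened with `.iterate = true` and `iterate()` is called on the Dir itself. A minimal sketch of the new shape (the path handling and filtering here are illustrative):

const std = @import("std");

// List regular files in a directory, 0.12 style.
fn listFiles(a: std.mem.Allocator, path: []const u8) !std.ArrayList([]u8) {
    // Was: std.fs.cwd().openIterableDir(path, .{})
    var dir = try std.fs.cwd().openDir(path, .{ .iterate = true });
    defer dir.close();

    var names = std.ArrayList([]u8).init(a);
    errdefer names.deinit();

    var itr = dir.iterate(); // was IterableDir.iterate()
    while (try itr.next()) |entry| {
        if (entry.kind != .file) continue;
        try names.append(try a.dupe(u8, entry.name));
    }
    return names;
}
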
src/contexts/git.zig
@@ -28,7 +28,7 @@ fn fetch(_: *const HSH) Error!Lexeme {
}
 
fn update(h: *HSH) Error!void {
var result = exec.childZ(
const result = exec.childZ(
h.alloc,
&[_:null]?[*:0]const u8{
"git",
 
src/draw/layout.zig
@@ -57,7 +57,7 @@ fn countLexems(lexs: []const Lexeme) u32 {
fn maxWidth(items: []const []const u8) u32 {
var max: u32 = 0;
for (items) |item| {
var len: u32 = countPrintable(item);
const len: u32 = countPrintable(item);
max = @max(max, len);
}
return max + 1;
@@ -66,7 +66,7 @@ fn maxWidth(items: []const []const u8) u32 {
fn maxWidthLexem(lexs: []const Lexeme) u32 {
var max: u32 = 0;
for (lexs) |lex| {
var len: u32 = countPrintable(lex.char);
const len: u32 = countPrintable(lex.char);
max = @max(max, len);
}
return max + 1;
@@ -183,7 +183,7 @@ fn tableLexeme(a: Allocator, items: []Lexeme, wh: Cord) Error![]LexTree {
/// LexTree.siblings.Lexem[..].char must all be free'd
/// items are not reordered
fn tableChar(a: Allocator, items: []const []const u8, wh: Cord) Error![]LexTree {
var lexes = a.alloc(Lexeme, items.len) catch return Error.Memory;
const lexes = a.alloc(Lexeme, items.len) catch return Error.Memory;
errdefer a.free(lexes);
 
for (items, lexes) |i, *l| {
 
src/exec.zig
@@ -85,7 +85,7 @@ var paths: []const []const u8 = undefined;
 
pub fn execFromInput(h: *HSH, str: []const u8) ![]u8 {
var itr = TokenIterator{ .raw = str };
var tokens = try itr.toSlice(h.alloc);
const tokens = try itr.toSlice(h.alloc);
defer h.alloc.free(tokens);
var ps = try Parser.parse(h.tkn.alloc, tokens);
defer ps.raze();
@@ -102,7 +102,7 @@ pub fn executableType(h: *HSH, str: []const u8) ?ExeKind {
if (Funcs.exists(str)) return .function;
if (bi.exists(str)) return .builtin;
paths = h.hfs.names.paths.items;
var plsfree = makeAbsExecutable(h.alloc, str) catch {
const plsfree = makeAbsExecutable(h.alloc, str) catch {
if (bi.existsOptional(str)) {
return .builtin;
}
@@ -197,7 +197,7 @@ fn mkBinary(a: Allocator, itr: *ParsedIterator) Error!Binary {
var argv = ArrayList(?ARG).init(a);
defer itr.raze();
 
var exeZ: ?ARG = makeExeZ(a, itr.first().cannon()) catch |e| {
const exeZ: ?ARG = makeExeZ(a, itr.first().cannon()) catch |e| {
log.warn("path missing {s}\n", .{itr.first().cannon()});
return e;
};
@@ -245,20 +245,20 @@ fn mkCallableStack(a: Allocator, itr: *TokenIterator) Error![]CallableStack {
return try stack.toOwnedSlice();
}
 
var eslice = itr.toSliceExec(a) catch unreachable;
const eslice = itr.toSliceExec(a) catch unreachable;
errdefer a.free(eslice);
var parsed = Parser.parse(a, eslice) catch |err| {
if (err == error.Empty) continue;
return Error.Parse;
};
var io: StdIo = StdIo{ .in = prev_stdout orelse STDIN_FILENO };
var condition: ?Conditional = conditional_rule;
const condition: ?Conditional = conditional_rule;
 
// peek is now the exec operator because of how the iterator works :<
if (peek.kind == .oper) {
switch (peek.kind.oper) {
.Pipe => {
const pipe = std.os.pipe2(0) catch return Error.OSErr;
const pipe = std.os.pipe2(.{}) catch return Error.OSErr;
io.pipe = true;
io.out = pipe[1];
prev_stdout = pipe[0];
@@ -382,7 +382,7 @@ fn free(a: Allocator, s: *CallableStack) void {
// TODO validate this clears all pointers correctly
for (e.argv) |*marg| {
if (marg.*) |argz| {
var arg = std.mem.span(argz);
const arg = std.mem.span(argz);
a.free(arg);
}
}
@@ -453,7 +453,7 @@ pub fn exec(h_: *HSH, input: []const u8) Error!void {
defer free(a, s);
if (s.conditional) |cond| {
if (fpid == 0) unreachable;
var waited_job = jobs.waitFor(fpid) catch @panic("job doesn't exist");
const waited_job = jobs.waitFor(fpid) catch @panic("job doesn't exist");
switch (cond) {
.After => {},
.Failure => {
@@ -538,7 +538,7 @@ pub const ChildResult = struct {
pub fn childParsed(a: Allocator, argv: []const u8) Error!ChildResult {
var itr = TokenIterator{ .raw = argv };
 
var slice = try itr.toSliceExec(a);
const slice = try itr.toSliceExec(a);
defer a.free(slice);
 
var parsed = Parser.parse(a, slice) catch return Error.Parse;
@@ -548,7 +548,7 @@ pub fn childParsed(a: Allocator, argv: []const u8) Error!ChildResult {
try list.append(p.cannon());
log.debug("Exec.childParse {} {s}\n", .{ list.items.len, p.cannon() });
} // Precomptue
var strs = try list.toOwnedSlice();
const strs = try list.toOwnedSlice();
defer a.free(strs);
 
return child(a, strs);
@@ -564,7 +564,7 @@ pub fn child(a: Allocator, argv: []const []const u8) !ChildResult {
for (argv) |arg| {
try list.append((try a.dupeZ(u8, arg)).ptr);
}
var argvZ: [:null]?[*:0]u8 = try list.toOwnedSliceSentinel(null);
const argvZ: [:null]?[*:0]u8 = try list.toOwnedSliceSentinel(null);
 
defer {
for (argvZ) |*argm| {
@@ -580,7 +580,7 @@ pub fn child(a: Allocator, argv: []const []const u8) !ChildResult {
/// Preformatted version of child. Accepts the null, and 0 terminated versions
/// to pass directly to exec. Caller maintains ownership of argv
pub fn childZ(a: Allocator, argv: [:null]const ?[*:0]const u8) Error!ChildResult {
var pipe = std.os.pipe2(0) catch unreachable;
const pipe = std.os.pipe2(.{}) catch unreachable;
const pid = std.os.fork() catch unreachable;
if (pid == 0) {
// we kid nao
@@ -606,7 +606,7 @@ pub fn childZ(a: Allocator, argv: [:null]const ?[*:0]const u8) Error!ChildResult
var r = f.reader();
var list = std.ArrayList([]u8).init(a);
 
var job = jobs.waitFor(pid) catch return Error.Unknown;
const job = jobs.waitFor(pid) catch return Error.Unknown;
 
while (r.readUntilDelimiterOrEofAlloc(a, '\n', 2048) catch unreachable) |line| {
try list.append(line);
@@ -638,7 +638,7 @@ test "mkstack" {
 
var a = std.testing.allocator;
ti.restart();
var stk = try mkCallableStack(a, &ti);
const stk = try mkCallableStack(a, &ti);
try std.testing.expect(stk.len == 2);
for (stk) |*s| {
free(a, s);
 
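Beyond the const conversions, the exec changes pick up one 0.12 API shift: flag arguments like std.os.pipe2's are packed structs now, so the bare `0` becomes an empty struct literal. A short sketch of the idiom under that assumption (the test scaffolding is illustrative):

const std = @import("std");

test "pipe2 takes a flags struct in 0.12" {
    const pipe = try std.os.pipe2(.{}); // was std.os.pipe2(0)
    defer {
        std.os.close(pipe[0]);
        std.os.close(pipe[1]);
    }

    _ = try std.os.write(pipe[1], "hi");
    var buf: [2]u8 = undefined;
    const n = try std.os.read(pipe[0], &buf);
    try std.testing.expectEqualStrings("hi", buf[0..n]);
}
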
src/fs.zig
@@ -54,11 +54,11 @@ const Names = struct {
};
 
const Dirs = struct {
cwd: std.fs.IterableDir,
conf: ?std.fs.IterableDir = null,
cwd: std.fs.Dir,
conf: ?std.fs.Dir = null,
 
fn update(self: *Dirs) !void {
self.cwd = try std.fs.cwd().openIterableDir(".", .{});
self.cwd = try std.fs.cwd().openDir(".", .{ .iterate = true });
}
 
fn raze(self: *Dirs) void {
@@ -88,7 +88,7 @@ pub fn init(a: mem.Allocator, env: std.process.EnvMap) !fs {
.rc = findCoreFile(a, &env, .rc),
.history = findCoreFile(a, &env, .history),
.dirs = .{
.cwd = try std.fs.cwd().openIterableDir(".", .{}),
.cwd = try std.fs.cwd().openDir(".", .{ .iterate = true }),
},
.names = .{
.cwd = try a.dupe(u8, "???"),
@@ -122,7 +122,7 @@ pub fn inotifyInstallRc(self: *fs, cb: ?INotify.Callback) !void {
if (self.rc) |_| {
if (self.names.home) |home| {
var buf: [2048]u8 = undefined;
var path = try std.fmt.bufPrint(&buf, "{s}/.config/hsh/hshrc", .{home});
const path = try std.fmt.bufPrint(&buf, "{s}/.config/hsh/hshrc", .{home});
try self.inotifyInstall(path, cb);
}
}
@@ -141,7 +141,7 @@ pub fn checkINotify(self: *fs, h: *HSH) bool {
);
return true;
}
var event: *const std.os.linux.inotify_event = @ptrCast(&buf);
const event: *const std.os.linux.inotify_event = @ptrCast(&buf);
// TODO optimize
for (&self.watches) |*watch| {
if (watch.*) |*wd| {
@@ -171,9 +171,9 @@ pub fn raze(self: *fs, a: mem.Allocator) void {
pub fn cd(self: *fs, trgt: []const u8) !void {
// std.debug.print("cd path {s} default {s}\n", .{ &path, hsh.fs.home_name });
const dir = if (trgt.len == 0 and self.names.home != null)
try self.dirs.cwd.dir.openDir(self.names.home.?, .{})
try self.dirs.cwd.openDir(self.names.home.?, .{})
else
try self.dirs.cwd.dir.openDir(trgt, .{});
try self.dirs.cwd.openDir(trgt, .{});
 
dir.setAsCwd() catch |e| {
log.err("cwd failed! {}", .{e});
@@ -251,12 +251,12 @@ pub fn reCreate(name: []const u8) ?std.fs.File {
}
 
pub fn globCwd(a: Allocator, search: []const u8) ![][]u8 {
var dir = try std.fs.cwd().openIterableDir(".", .{});
var dir = try std.fs.cwd().openDir(".", .{ .iterate = true });
defer dir.close();
return globAt(a, dir, search);
}
 
pub fn globAt(a: Allocator, dir: std.fs.IterableDir, search: []const u8) ![][]u8 {
pub fn globAt(a: Allocator, dir: std.fs.Dir, search: []const u8) ![][]u8 {
// TODO multi space glob
std.debug.assert(std.mem.count(u8, search, "*") == 1);
var split = std.mem.splitScalar(u8, search, '*');
@@ -302,7 +302,7 @@ fn findPath(
log.debug("unable to open {s}\n", .{out});
}
} else if (env.get("HOME")) |home| {
var main = try a.dupe(u8, home);
const main = try a.dupe(u8, home);
defer a.free(main);
if (std.fs.openDirAbsolute(home, .{})) |h| {
if (h.openDir(".config", .{})) |hc| {
 
src/history.zig
@@ -10,7 +10,7 @@ cnt: usize = 0,
 
fn seenAdd(self: *History, seen: []const u8) void {
if (self.seen_list) |*sl| {
var dupe = self.alloc.?.dupe(u8, seen) catch unreachable;
const dupe = self.alloc.?.dupe(u8, seen) catch unreachable;
sl.append(dupe) catch unreachable;
}
}
@@ -48,7 +48,7 @@ pub fn atTop(self: *History) bool {
/// Returns true when there's is assumed to be more history
/// Final file pos is undefined
fn readLine(self: *History, buffer: ?*std.ArrayList(u8)) !bool {
var b = buffer orelse return try self.file.getPos() != 0;
const b = buffer orelse return try self.file.getPos() != 0;
var hist = self.file;
const pos = try hist.getPos();
try hist.reader().readUntilDelimiterArrayList(b, '\n', 1 << 16);
@@ -62,7 +62,7 @@ fn readLine(self: *History, buffer: ?*std.ArrayList(u8)) !bool {
/// buffer will likely return the same line)
fn readLinePrev(self: *History, buffer: ?*std.ArrayList(u8)) !bool {
var hist = self.file;
var cursor = try hist.getPos();
const cursor = try hist.getPos();
var buf: [1]u8 = undefined;
while (cursor > 0) {
hist.seekBy(-2) catch {
@@ -153,7 +153,7 @@ test "samesame" {
;
const line = "this is line 4";
 
var fbs = std.io.FixedBufferStream(@TypeOf(src)){
const fbs = std.io.FixedBufferStream(@TypeOf(src)){
.buffer = src,
.pos = src.len,
};
 
src/hsh.zig
@@ -54,7 +54,7 @@ comptime {
 
/// caller owns memory
fn readLine(a: *Allocator, r: std.fs.File.Reader) ![]u8 {
var buf = a.alloc(u8, 1024) catch return Error.Memory;
const buf = a.alloc(u8, 1024) catch return Error.Memory;
errdefer a.free(buf);
if (r.readUntilDelimiterOrEof(buf, '\n')) |line| {
if (line) |l| {
@@ -80,7 +80,7 @@ pub fn readRCINotify(h: *HSH, e: INEvent) void {
 
fn readFromRC(hsh: *HSH) E!void {
if (hsh.hfs.rc) |rc_| {
var r = rc_.reader();
const r = rc_.reader();
var a = hsh.alloc;
 
var tokenizer = Tokenizer.init(a);
@@ -97,7 +97,7 @@ fn readFromRC(hsh: *HSH) E!void {
defer tokenizer.reset();
tokenizer.consumes(line) catch return E.Memory;
var titr = tokenizer.iterator();
var tokens = titr.toSlice(a) catch return E.Memory;
const tokens = titr.toSlice(a) catch return E.Memory;
defer a.free(tokens);
var pitr = Parser.parse(a, tokens) catch continue;
 
@@ -198,7 +198,7 @@ pub const HSH = struct {
// example I found. It's probably sub optimal, but ¯\_(ツ)_/¯. We may
// decide we care enough to fix this, or not. The internet seems to think
// it's a mistake to alter the env for a running process.
var env = std.process.getEnvMap(a) catch return E.Unknown; // TODO err handling
const env = std.process.getEnvMap(a) catch return E.Unknown; // TODO err handling
 
var hfs = fs.init(a, env) catch return E.Memory;
hfs.inotifyInstallRc(readRCINotify) catch {
 
src/input.zig
@@ -112,7 +112,7 @@ fn doComplete(hsh: *HSH, tkn: *Tokenizer, comp: *complete.CompSet) !Mode {
}
 
if (comp.countFiltered() > 1) {
var target = comp.next();
const target = comp.next();
try tkn.maybeReplace(target);
comp.drawAll(&hsh.draw, hsh.draw.term_size) catch |err| {
if (err == Draw.Layout.Error.ItemCount) return .COMPLETING else return err;
@@ -273,7 +273,7 @@ fn ctrlCode(in: *Input, hsh: *HSH, tkn: *Tokenizer, b: u8, comp: *complete.CompS
return .Prompt;
}
 
var nl_exec = tkn.consumec(nl);
const nl_exec = tkn.consumec(nl);
if (nl_exec == error.Exec) {
if (tkn.validate()) {} else |e| {
log.err("validate", .{});
@@ -429,7 +429,7 @@ pub fn nonInteractive(in: *Input, hsh: *HSH, comp: *complete.CompSet) !Event {
in.mode = .TYPING;
return .Signaled;
}
var nbyte: usize = try read(hsh.input, &buffer);
const nbyte: usize = try read(hsh.input, &buffer);
if (nbyte == 0) return .ExitHSH;
 
// No... I don't like this, but I've spent too long staring at it
 
src/keys.zig
@@ -201,7 +201,7 @@ fn csi_xterm(buffer: []const u8) Error!Event {
}
 
fn csi_vt(in: []const u8) Error!Key {
var y: u16 = std.fmt.parseInt(u16, in, 10) catch 0;
const y: u16 = std.fmt.parseInt(u16, in, 10) catch 0;
switch (y) {
1 => return .Home,
2 => return .Insert,
 
src/logic.zig
@@ -99,7 +99,7 @@ const If = struct {
 
fn mkElif(a: Allocator, str: []const u8) Error!?*Elif {
if (str.len == 0) return null;
var elfi = try Token.any(str);
const elfi = try Token.any(str);
if (elfi.kind == .resr) {
switch (elfi.kind.resr) {
.Fi => return null,
@@ -110,17 +110,17 @@ const If = struct {
offset = off + 4;
} else return Error.InvalidLogic;
// find fi;
var fi = try Token.any(str[str.len - 2 ..]);
const fi = try Token.any(str[str.len - 2 ..]);
if (fi.kind != .resr or fi.kind.resr != .Fi) {
return Error.InvalidLogic;
}
 
var elif = try a.create(Elif);
const elif = try a.create(Elif);
elif.* = .{ .elses = str[offset .. str.len - 2] };
return elif;
},
.Elif => {
var elif = try a.create(Elif);
const elif = try a.create(Elif);
elif.* = .{ .elifs = try mkIf(a, str) };
return elif;
},
@@ -174,7 +174,7 @@ const If = struct {
fn execClause(self: *If) Error!bool {
const clause = self.clause orelse return Error.InvalidLogic;
log.debug("testing logic clasue \n {s}\n", .{clause});
var child = exec_.childParsed(self.alloc, clause) catch |err| {
const child = exec_.childParsed(self.alloc, clause) catch |err| {
log.err("Unexpected error ({}) when attempting to run logic\n", .{err});
return Error.ExecFailure;
};
@@ -348,7 +348,7 @@ pub const Logicizer = struct {
};
 
test "if" {
var a = std.testing.allocator;
const a = std.testing.allocator;
const if_str =
\\if true
\\then
 
src/main.zig
@@ -24,7 +24,7 @@ test "main" {
}
 
fn core(hsh: *HSH) !bool {
var tkn = &hsh.tkn;
const tkn = &hsh.tkn;
defer hsh.draw.reset();
//try Context.update(hsh, &[_]Context.Contexts{.git});
var comp = try complete.init(hsh);
@@ -75,7 +75,7 @@ fn usage() void {
/// return 2 == alloc error
fn execTacC(args: *std.process.ArgIterator) u8 {
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
var a = gpa.allocator();
const a = gpa.allocator();
var hsh = HSH.init(a) catch return 255;
defer hsh.raze();
hsh.tkn = Tokenizer.init(a);
@@ -86,7 +86,7 @@ fn execTacC(args: *std.process.ArgIterator) u8 {
while (args.next()) |arg| {
hsh.tkn.consumes(arg) catch return 2;
}
var str = hsh.alloc.dupe(u8, hsh.tkn.raw.items) catch return 2;
const str = hsh.alloc.dupe(u8, hsh.tkn.raw.items) catch return 2;
defer hsh.alloc.free(str);
 
Exec.exec(&hsh, str) catch |err| {
@@ -142,7 +142,7 @@ pub fn main() !void {
std.time.sleep(6 * 1000 * 1000 * 1000);
}
}
var a = gpa.allocator();
const a = gpa.allocator();
 
var hsh = try HSH.init(a);
defer hsh.raze();
@@ -170,7 +170,7 @@ pub fn main() !void {
if (hsh.tkn.raw.items.len == 0) continue;
// debugging data
 
var str = try hsh.alloc.dupe(u8, hsh.tkn.raw.items);
const str = try hsh.alloc.dupe(u8, hsh.tkn.raw.items);
defer hsh.alloc.free(str);
 
//var itr = hsh.tkn.iterator();
@@ -180,7 +180,7 @@ pub fn main() !void {
error.ExeNotFound => {
const first = Exec.execFromInput(&hsh, str) catch @panic("memory");
defer hsh.alloc.free(first);
var tree = Draw.LexTree{ .siblings = @constCast(&[_]Draw.Lexeme{
const tree = Draw.LexTree{ .siblings = @constCast(&[_]Draw.Lexeme{
Draw.Lexeme{
.char = "[ Unable to find ",
.style = .{ .attr = .bold, .fg = .red },
 
src/mem.zig
@@ -30,31 +30,31 @@ pub fn concat(a: Allocator, base: []u8, ends: []const []const u8) ![]u8 {
}
 
pub fn concatPath(a: Allocator, base: []u8, end: []const u8) ![]u8 {
var sep = if (base[base.len - 1] == '/') "" else "/";
var end_clean = if (end[0] == '/') end[1..] else end;
const sep = if (base[base.len - 1] == '/') "" else "/";
const end_clean = if (end[0] == '/') end[1..] else end;
return concat(a, base, &[2][]const u8{ sep, end_clean });
}
 
test "concat" {
var a = std.testing.allocator;
var thing = try a.dupe(u8, "thing");
var out = try concat(a, thing, &[_][]const u8{ " blerg", " null" });
const thing = try a.dupe(u8, "thing");
const out = try concat(a, thing, &[_][]const u8{ " blerg", " null" });
try std.testing.expect(std.mem.eql(u8, out, "thing blerg null"));
defer a.free(out);
}
 
test "concatPath" {
var a = std.testing.allocator;
var thing = try a.dupe(u8, "thing");
var out = try concatPath(a, thing, "null");
const thing = try a.dupe(u8, "thing");
const out = try concatPath(a, thing, "null");
try std.testing.expect(std.mem.eql(u8, out, "thing/null"));
defer a.free(out);
}
 
test "concatPath 2" {
var a = std.testing.allocator;
var thing = try a.dupe(u8, "thing/");
var out = try concatPath(a, thing, "null");
const thing = try a.dupe(u8, "thing/");
const out = try concatPath(a, thing, "null");
try std.testing.expect(std.mem.eql(u8, out, "thing/null"));
defer a.free(out);
}
 
src/parse.zig
@@ -66,7 +66,7 @@ pub const Parsed = struct {
self.capacity = target;
return;
}
var new = try self.alloc.realloc(self.str, target);
const new = try self.alloc.realloc(self.str, target);
self.str = new;
self.str.len = oldlen;
self.capacity = target;
@@ -173,7 +173,7 @@ pub const ParsedIterator = struct {
 
self.aliasedAdd(token.cannon());
var a_itr = TokenIterator{ .raw = Parser.alias(token) catch token.str };
var aliases = try self.resolveAlias(a_itr.first().*);
const aliases = try self.resolveAlias(a_itr.first().*);
defer self.alloc.free(aliases);
for (aliases) |stkn| {
try tokens.append(stkn);
@@ -204,16 +204,16 @@ pub const ParsedIterator = struct {
}
 
if (std.mem.indexOf(u8, local.cannon(), "$") != null or local.kind == .vari) {
var owned = try self.resolveDollar(local);
const owned = try self.resolveDollar(local);
try tokens.append(Token{ .str = "", .resolved = owned });
} else if (std.mem.indexOf(u8, local.cannon(), "*")) |_| {
var real = try Parser.single(self.alloc, local);
const real = try Parser.single(self.alloc, local);
defer if (real.resolved) |r| self.alloc.free(r);
var globs = try self.resolveGlob(real);
const globs = try self.resolveGlob(real);
defer self.alloc.free(globs);
for (globs) |glob| try tokens.append(glob);
} else {
var real = try Parser.single(self.alloc, local);
const real = try Parser.single(self.alloc, local);
try tokens.append(real);
}
return try tokens.toOwnedSlice();
@@ -262,8 +262,8 @@ pub const ParsedIterator = struct {
var tokens = ArrayList(Token).init(self.alloc);
if (std.mem.indexOf(u8, token.cannon(), "/")) |_| {
var bitr = std.mem.splitBackwards(u8, token.cannon(), "/");
var glob = bitr.first();
var dir = bitr.rest();
const glob = bitr.first();
const dir = bitr.rest();
if (Parser.globAt(self.alloc, dir, glob)) |names| {
for (names) |name| {
defer self.alloc.free(name);
@@ -272,7 +272,7 @@ pub const ParsedIterator = struct {
{
continue;
}
var path = try std.mem.join(self.alloc, "/", &[2][]const u8{ dir, name });
const path = try std.mem.join(self.alloc, "/", &[2][]const u8{ dir, name });
try tokens.append(Token{ .str = "", .resolved = path });
}
self.alloc.free(names);
@@ -427,9 +427,9 @@ pub const Parser = struct {
/// Caller owns memory for both list of names, and each name
fn globAt(a: Allocator, d: []const u8, str: []const u8) Error![][]u8 {
var dir = if (d[0] == '/')
std.fs.openIterableDirAbsolute(d, .{}) catch return Error.Unknown
std.fs.openDirAbsolute(d, .{ .iterate = true }) catch return Error.Unknown
else
std.fs.cwd().openIterableDir(d, .{}) catch return Error.Unknown;
std.fs.cwd().openDir(d, .{ .iterate = true }) catch return Error.Unknown;
defer dir.close();
return fs.globAt(a, dir, str) catch @panic("this error not implemented");
}
@@ -472,22 +472,22 @@ pub const Parser = struct {
 
fn subcmd(a: Allocator, tkn: Token) Error!Token {
var local = tkn;
var cmd = tkn.str[2 .. tkn.str.len - 1];
const cmd = tkn.str[2 .. tkn.str.len - 1];
std.debug.assert(tkn.str[0] == '$');
std.debug.assert(tkn.str[1] == '(');
 
var itr = TokenIterator{ .raw = cmd };
var argv_t = itr.toSlice(a) catch return Error.Memory;
const argv_t = itr.toSlice(a) catch return Error.Memory;
defer a.free(argv_t);
var list = ArrayList([]const u8).init(a);
for (argv_t) |t| {
list.append(t.cannon()) catch return Error.Memory;
}
var argv = list.toOwnedSlice() catch return Error.Memory;
const argv = list.toOwnedSlice() catch return Error.Memory;
defer a.free(argv);
local.parsed = true;
 
var out = exec.child(a, argv) catch {
const out = exec.child(a, argv) catch {
local.resolved = a.dupe(u8, local.str) catch return Error.Memory;
return local;
};
@@ -512,7 +512,7 @@ test "iterator nows" {
 
try t.consumes("\"this is some text\" more text");
var itr = t.iterator();
var ts = try itr.toSlice(a);
const ts = try itr.toSlice(a);
defer a.free(ts);
var ptr = try Parser.parse(a, ts);
defer ptr.raze();
@@ -530,7 +530,7 @@ test "breaking" {
 
try t.consumes("alias la='ls -la'");
var titr = t.iterator();
var tokens = try titr.toSlice(a);
const tokens = try titr.toSlice(a);
try expectEql(tokens.len, 4);
 
titr.restart();
@@ -556,7 +556,7 @@ test "breaking" {
}
 
test "iterator alias is builtin" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
var ts = [_]Token{
Token{ .kind = .word, .str = "alias" },
@@ -661,7 +661,7 @@ test "iterator aliased recurse" {
i += 1;
}
try expectEql(i, 4);
var first = itr.first().cannon();
const first = itr.first().cannon();
try expect(eql(u8, first, "ls"));
try expect(eql(u8, itr.next().?.cannon(), "--color=auto"));
try expect(eql(u8, itr.next().?.cannon(), "-la"));
@@ -670,7 +670,7 @@ test "iterator aliased recurse" {
}
 
test "parse vars" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
comptime var ts = [5]Token{
try Token.any("echo"),
@@ -688,7 +688,7 @@ test "parse vars" {
i += 1;
}
try expectEql(i, 3);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echo", first);
try eqlStr("", itr.next().?.cannon());
try eqlStr("blerg", itr.next().?.cannon());
@@ -696,7 +696,7 @@ test "parse vars" {
}
 
test "parse vars existing" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
comptime var ts = [3]Token{
try Token.any("echo"),
@@ -719,13 +719,13 @@ test "parse vars existing" {
i += 1;
}
try expectEql(i, 1);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echocorrectblerg", first);
try expect(itr.next() == null);
}
 
test "parse vars existing with white space" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
comptime var ts = [5]Token{
try Token.any("echo"),
@@ -750,7 +750,7 @@ test "parse vars existing with white space" {
i += 1;
}
try expectEql(i, 3);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echo", first);
var tst = itr.next().?;
try eqlStr("correct", tst.cannon());
@@ -782,7 +782,7 @@ test "parse vars existing braces" {
i += 1;
}
try expectEql(i, 3);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echo", first);
 
try eqlStr("valueextra", itr.next().?.cannon());
@@ -813,7 +813,7 @@ test "parse vars existing braces inline" {
i += 1;
}
try expectEql(i, 3);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echo", first);
 
try eqlStr("extravalue", itr.next().?.cannon());
@@ -844,7 +844,7 @@ test "parse vars existing braces inline both" {
i += 1;
}
try expectEql(i, 3);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("echo", first);
 
try eqlStr("extravaluethingy", itr.next().?.cannon());
@@ -853,7 +853,7 @@ test "parse vars existing braces inline both" {
}
 
test "parse dollar dollar bills y'all" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
var tkns = [_]Token{
Token.make("echo", .word),
@@ -920,7 +920,7 @@ test "parse path" {
i += 1;
}
try expectEql(i, 2);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("ls", first);
 
try eqlStr("~", itr.next().?.cannon());
@@ -949,7 +949,7 @@ test "parse path ~" {
i += 1;
}
try expectEql(i, 2);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("ls", first);
 
var thing = itr.next();
@@ -980,7 +980,7 @@ test "parse path ~/" {
i += 1;
}
try expectEql(i, 2);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("ls", first);
 
var thing = itr.next();
@@ -1011,7 +1011,7 @@ test "parse path ~/place" {
i += 1;
}
try expectEql(i, 2);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("ls", first);
 
var tst = itr.next();
@@ -1042,7 +1042,7 @@ test "parse path /~/otherplace" {
i += 1;
}
try expectEql(i, 2);
var first = itr.first().cannon();
const first = itr.first().cannon();
try eqlStr("ls", first);
 
var tst = itr.next();
@@ -1055,23 +1055,23 @@ test "glob" {
var a = std.testing.allocator;
 
var oldcwd = std.fs.cwd();
var basecwd = try oldcwd.realpathAlloc(a, ".");
const basecwd = try oldcwd.realpathAlloc(a, ".");
defer {
var dir = std.fs.openDirAbsolute(basecwd, .{}) catch unreachable;
dir.setAsCwd() catch {};
a.free(basecwd);
}
 
var tmpCwd = std.testing.tmpIterableDir(.{});
var tmpCwd = std.testing.tmpDir(.{ .iterate = true });
defer tmpCwd.cleanup();
try tmpCwd.iterable_dir.dir.setAsCwd();
_ = try tmpCwd.iterable_dir.dir.createFile("blerg", .{});
_ = try tmpCwd.iterable_dir.dir.createFile(".blerg", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("blerg2", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("w00t", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("no_wai", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("ya-wai", .{});
var di = tmpCwd.iterable_dir.iterate();
try tmpCwd.dir.setAsCwd();
_ = try tmpCwd.dir.createFile("blerg", .{});
_ = try tmpCwd.dir.createFile(".blerg", .{});
_ = try tmpCwd.dir.createFile("blerg2", .{});
_ = try tmpCwd.dir.createFile("w00t", .{});
_ = try tmpCwd.dir.createFile("no_wai", .{});
_ = try tmpCwd.dir.createFile("ya-wai", .{});
var di = tmpCwd.dir.iterate();
 
var names = std.ArrayList([]u8).init(a);
 
@@ -1116,21 +1116,21 @@ test "glob ." {
var a = std.testing.allocator;
 
var oldcwd = std.fs.cwd();
var basecwd = try oldcwd.realpathAlloc(a, ".");
const basecwd = try oldcwd.realpathAlloc(a, ".");
defer {
var dir = std.fs.openDirAbsolute(basecwd, .{}) catch unreachable;
dir.setAsCwd() catch {};
a.free(basecwd);
}
 
var tmpCwd = std.testing.tmpIterableDir(.{});
var tmpCwd = std.testing.tmpDir(.{ .iterate = true });
defer tmpCwd.cleanup();
try tmpCwd.iterable_dir.dir.setAsCwd();
_ = try tmpCwd.iterable_dir.dir.createFile("blerg", .{});
_ = try tmpCwd.iterable_dir.dir.createFile(".blerg", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("no_wai", .{});
_ = try tmpCwd.iterable_dir.dir.createFile("ya-wai", .{});
var di = tmpCwd.iterable_dir.iterate();
try tmpCwd.dir.setAsCwd();
_ = try tmpCwd.dir.createFile("blerg", .{});
_ = try tmpCwd.dir.createFile(".blerg", .{});
_ = try tmpCwd.dir.createFile("no_wai", .{});
_ = try tmpCwd.dir.createFile("ya-wai", .{});
var di = tmpCwd.dir.iterate();
 
var names = std.ArrayList([]u8).init(a);
 
@@ -1176,16 +1176,16 @@ test "glob ~/*" {
Variables.init(a);
defer Variables.raze();
 
var tmpCwd = std.testing.tmpIterableDir(.{});
var tmpCwd = std.testing.tmpDir(.{ .iterate = true });
defer tmpCwd.cleanup();
var baseCwd = try tmpCwd.iterable_dir.dir.realpathAlloc(a, ".");
const baseCwd = try tmpCwd.dir.realpathAlloc(a, ".");
defer a.free(baseCwd);
 
_ = try tmpCwd.iterable_dir.dir.createFile("blerg", .{});
_ = try tmpCwd.dir.createFile("blerg", .{});
 
try Variables.put("HOME", baseCwd);
 
var di = tmpCwd.iterable_dir.iterate();
var di = tmpCwd.dir.iterate();
var names = std.ArrayList([]u8).init(a);
 
while (try di.next()) |each| {
@@ -1279,7 +1279,7 @@ test "naughty strings parsed" {
 
var itr = TokenIterator{ .raw = while_str };
 
var slice = try itr.toSlice(a);
const slice = try itr.toSlice(a);
defer a.free(slice);
 
var pitr = try Parser.parse(a, slice);
 
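The glob tests above also track std.testing.tmpIterableDir disappearing in 0.12: tmpDir now takes the iterate flag directly and the directory is reached through `.dir` instead of `.iterable_dir.dir`. A minimal sketch of the updated setup (the assertion is illustrative):

const std = @import("std");

test "tmpDir replaces tmpIterableDir" {
    var tmp = std.testing.tmpDir(.{ .iterate = true }); // was tmpIterableDir(.{})
    defer tmp.cleanup();

    (try tmp.dir.createFile("blerg", .{})).close(); // was tmp.iterable_dir.dir.createFile

    var count: usize = 0;
    var itr = tmp.dir.iterate(); // was tmp.iterable_dir.iterate()
    while (try itr.next()) |_| count += 1;
    try std.testing.expectEqual(@as(usize, 1), count);
}
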
src/prompt.zig
@@ -80,7 +80,7 @@ fn prompt(d: *Draw.Drawable, u: ?[]const u8, cwd: []const u8) !void {
}
 
pub fn draw(hsh: *HSH, tkn: *Tokenizer) !void {
var bgjobs = Jobs.getBgSlice(hsh.alloc) catch unreachable;
const bgjobs = Jobs.getBgSlice(hsh.alloc) catch unreachable;
defer hsh.alloc.free(bgjobs);
try jobsContext(hsh, bgjobs);
//try ctxContext(hsh, try Context.fetch(hsh, .git));
 
src/random.zig
@@ -5,7 +5,7 @@ var prng = std.rand.DefaultPrng.init(0);
var rand: std.rand.Random = prng.random();
 
pub fn init() void {
var time = std.time.microTimestamp();
const time = std.time.microTimestamp();
prng.seed(@bitCast(time));
rand = prng.random();
}
 
src/signals.zig
@@ -1,7 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const os = std.os;
const Queue = std.atomic.Queue;
const Queue = std.TailQueue;
const HSH = @import("hsh.zig").HSH;
const log = @import("log");
const jobs = @import("jobs.zig");
@@ -37,7 +37,7 @@ var root_alloc: Allocator = undefined;
var alloc: Allocator = undefined;
var fba: std.heap.FixedBufferAllocator = undefined;
var fbuffer: []u8 = undefined;
var queue: Queue(Signal) = Queue(Signal).init();
var queue: Queue(Signal) = Queue(Signal){};
 
export fn sig_cb(sig: c_int, info: *const os.siginfo_t, _: ?*const anyopaque) callconv(.C) void {
log.trace(
@@ -63,15 +63,17 @@ export fn sig_cb(sig: c_int, info: *const os.siginfo_t, _: ?*const anyopaque) ca
sigp.* = Queue(Signal).Node{
.data = Signal{ .signal = sig, .info = info.* },
};
queue.put(sigp);
queue.append(sigp);
},
}
}
 
pub fn get() ?Queue(Signal).Node {
var node = queue.get() orelse return null;
defer alloc.destroy(node);
return node.*;
if (queue.pop()) |node| {
defer alloc.destroy(node);
return node.*;
}
return null;
}
 
/// TODO change init to accept a GP allocator, and wrap *that* with arena
 
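std.atomic.Queue is gone in 0.12, so the signal queue switches to std.TailQueue: an empty struct literal instead of init(), append() instead of put(), and pop() instead of get(). A rough sketch of the pattern with a plain integer standing in for the Signal payload; note that TailQueue, unlike the old queue, does no locking of its own.

const std = @import("std");

test "TailQueue stands in for atomic.Queue" {
    const Queue = std.TailQueue(u32);
    var queue: Queue = .{}; // 0.12: no init(); the empty literal is the empty queue

    var node = Queue.Node{ .data = 42 };
    queue.append(&node); // was queue.put(&node)

    const popped = queue.pop() orelse return error.Empty; // was queue.get()
    try std.testing.expectEqual(@as(u32, 42), popped.data);
    try std.testing.expect(queue.pop() == null);
}
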
src/token.zig
@@ -129,7 +129,7 @@ fn ioredir(src: []const u8) Error!Token {
else => return Error.InvalidSrc,
}
while (src[i] == ' ' or src[i] == '\t') : (i += 1) {}
var target = (try word(src[i..])).str;
const target = (try word(src[i..])).str;
t.substr = target;
t.str = src[0 .. i + target.len];
return t;
@@ -262,7 +262,7 @@ pub fn word(src: []const u8) Error!Token {
}
 
pub fn wordExpanded(src: []const u8) Error!Token {
var tkn = try word(src);
const tkn = try word(src);
 
// I know, and I'm sorry
if (tkn.str.len <= 5) {
@@ -276,7 +276,7 @@ pub fn wordExpanded(src: []const u8) Error!Token {
// TODO accept other whitespace?
while (offset < src.len and src[offset] == ' ') offset += 1;
 
var f = func(src[offset..]) catch return tkn;
const f = func(src[offset..]) catch return tkn;
return Token.make(src[0 .. offset + f.str.len], .nos);
}
 
@@ -290,7 +290,7 @@ pub fn logic(src: []const u8) Error!Token {
}
return Error.InvalidSrc;
};
var r = Reserved.fromStr(src[0..end]) orelse unreachable;
const r = Reserved.fromStr(src[0..end]) orelse unreachable;
 
const marker: Reserved = switch (r) {
.If => .Fi,
@@ -536,7 +536,7 @@ pub const Iterator = struct {
 
pub fn toSliceExecStr(self: *Self, a: Allocator) ![]const []const u8 {
const tokens = try self.toSliceExec(a);
var strs = try a.alloc([]u8, tokens.len);
const strs = try a.alloc([]u8, tokens.len);
for (tokens, strs) |t, *s| {
s.* = @constCast(t.str);
}
 
src/tokenizer.zig
@@ -487,7 +487,7 @@ test "quotes tokened" {
}
 
test "alloc" {
var t = Tokenizer.init(std.testing.allocator);
const t = Tokenizer.init(std.testing.allocator);
try expect(std.mem.eql(u8, t.raw.items, ""));
}
 
@@ -499,7 +499,7 @@ test "tokens" {
try t.consumec(c);
}
var titr = t.iterator();
var tokens = try titr.toSlice(a);
const tokens = try titr.toSlice(a);
defer a.free(tokens);
try expect(std.mem.eql(u8, t.raw.items, "token"));
}
@@ -585,7 +585,7 @@ test "breaking" {
 
try t.consumes("alias la='ls -la'");
var titr = t.iterator();
var tokens = try titr.toSlice(a);
const tokens = try titr.toSlice(a);
try expectEql(tokens.len, 4);
a.free(tokens);
}
@@ -816,7 +816,7 @@ test "token > execSlice" {
 
ti.restart();
try std.testing.expect(ti.peek() != null);
var slice = try ti.toSliceExec(std.testing.allocator);
const slice = try ti.toSliceExec(std.testing.allocator);
try std.testing.expect(ti.peek() == null);
try std.testing.expect(ti.peek() == null);
try std.testing.expect(ti.peek() == null);
@@ -994,7 +994,7 @@ test "all execs" {
}
 
test "pop" {
var a = std.testing.allocator;
const a = std.testing.allocator;
var t = Tokenizer.init(a);
const str = "this is a string";
for (str) |c| {
@@ -1157,7 +1157,7 @@ test "make safe" {
 
try std.testing.expect(null == try tk.makeSafe("string"));
 
var str = try tk.makeSafe("str ing");
const str = try tk.makeSafe("str ing");
defer a.free(str.?);
try std.testing.expectEqualStrings("str\\ ing", str.?);
}
@@ -1262,7 +1262,7 @@ test "invalid logic" {
\\done
;
 
var ifs = Token.logic(if_str);
const ifs = Token.logic(if_str);
try std.testing.expectError(TokenError.OpenLogic, ifs);
 
const case_str =
@@ -1273,7 +1273,7 @@ test "invalid logic" {
\\fi
;
 
var cases = Token.logic(case_str);
const cases = Token.logic(case_str);
try std.testing.expectError(TokenError.OpenLogic, cases);
 
const for_str =
@@ -1283,7 +1283,7 @@ test "invalid logic" {
\\until
;
 
var fors = Token.logic(for_str);
const fors = Token.logic(for_str);
try std.testing.expectError(TokenError.OpenLogic, fors);
 
const while_str =
@@ -1293,7 +1293,7 @@ test "invalid logic" {
\\true
;
 
var whiles = Token.logic(while_str);
const whiles = Token.logic(while_str);
try std.testing.expectError(TokenError.OpenLogic, whiles);
}
 
@@ -1369,7 +1369,7 @@ test "naughty strings" {
}
 
test "escape newline" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
var tzr = Tokenizer.init(a);
defer tzr.raze();
@@ -1389,7 +1389,7 @@ test "escape newline" {
}
 
test "build functions" {
var a = std.testing.allocator;
const a = std.testing.allocator;
var tzr = Tokenizer.init(a);
defer tzr.raze();
 
 
src/tty.zig
@@ -41,13 +41,13 @@ pub fn init(a: Allocator) !TTY {
const is_tty = std.io.getStdOut().isTty() and std.io.getStdIn().isTty();
 
const tty = if (is_tty)
os.open("/dev/tty", os.linux.O.RDWR, 0) catch std.io.getStdOut().handle
os.open("/dev/tty", .{ .ACCMODE = .RDWR }, 0) catch std.io.getStdOut().handle
else
std.io.getStdOut().handle;
 
std.debug.assert(current_tty == null);
 
var self = TTY{
const self = TTY{
.alloc = a,
.dev = tty,
.is_tty = is_tty,
@@ -70,22 +70,26 @@ pub fn getAttr(self: *TTY) ?os.termios {
 
fn makeRaw(orig: ?os.termios) os.termios {
var next = orig orelse os.termios{
.oflag = os.linux.OPOST | os.linux.ONLCR,
.cflag = os.linux.CS8 | os.linux.CREAD | os.linux.CLOCAL,
.lflag = os.linux.ISIG | os.linux.ICANON | os.linux.ECHO | os.linux.IEXTEN | os.linux.ECHOE,
.iflag = os.linux.BRKINT | os.linux.ICRNL | os.linux.IMAXBEL,
.oflag = .{ .OPOST = true, .ONLCR = true },
.cflag = .{ .CSIZE = .CS8, .CREAD = true, .CLOCAL = true },
.lflag = .{ .ISIG = true, .ICANON = true, .ECHO = true, .IEXTEN = true, .ECHOE = true },
.iflag = .{ .BRKINT = true, .ICRNL = true, .IMAXBEL = true },
.line = 0,
.cc = .{},
.ispeed = 9600,
.ospeed = 9600,
.cc = [_]u8{0} ** std.os.linux.NCCS,
.ispeed = .B9600,
.ospeed = .B9600,
};
next.iflag &= ~(os.linux.IXON |
os.linux.BRKINT | os.linux.INPCK | os.linux.ISTRIP);
next.iflag |= os.linux.ICRNL;
next.iflag.IXON = false;
next.iflag.BRKINT = false;
next.iflag.INPCK = false;
next.iflag.ISTRIP = false;
//next.lflag &= ~(os.linux.ECHO | os.linux.ICANON | os.linux.ISIG | os.linux.IEXTEN);
next.lflag &= ~(os.linux.ECHO | os.linux.ECHONL | os.linux.ICANON | os.linux.IEXTEN);
next.cc[os.system.V.TIME] = 1; // 0.1 sec resolution
next.cc[os.system.V.MIN] = 0;
next.lflag.ECHO = false;
next.lflag.ECHONL = false;
next.lflag.ICANON = false;
next.lflag.IEXTEN = false;
next.cc[@intFromEnum(os.system.V.TIME)] = 1; // 0.1 sec resolution
next.cc[@intFromEnum(os.system.V.MIN)] = 0;
return next;
}
 
@@ -230,7 +234,7 @@ const expect = std.testing.expect;
test "split" {
var s = "\x1B[86;1R";
var splits = std.mem.split(u8, s[2..], ";");
var x: usize = std.fmt.parseInt(usize, splits.next().?, 10) catch 0;
const x: usize = std.fmt.parseInt(usize, splits.next().?, 10) catch 0;
var y: usize = 0;
if (splits.next()) |thing| {
y = std.fmt.parseInt(usize, thing[0 .. thing.len - 1], 10) catch unreachable;
 
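The tty rework tracks 0.12's termios layout: the flag words are packed structs of booleans, so mask arithmetic becomes named field assignments, cc is indexed through @intFromEnum, and the speeds are enum members. A hedged raw-mode sketch along the same lines as the hunk above (not the commit's exact code):

const std = @import("std");
const os = std.os;

// Turn off echo and canonical mode, 0.12 style.
fn makeRawish(orig: os.termios) os.termios {
    var next = orig;
    // Packed-struct fields replace `flag &= ~(MASK)` arithmetic.
    next.iflag.IXON = false;
    next.iflag.ICRNL = false;
    next.lflag.ECHO = false;
    next.lflag.ECHONL = false;
    next.lflag.ICANON = false;
    next.lflag.IEXTEN = false;
    // cc is indexed by the V enum now, hence @intFromEnum.
    next.cc[@intFromEnum(os.system.V.TIME)] = 1; // 0.1 s read resolution
    next.cc[@intFromEnum(os.system.V.MIN)] = 0;
    return next;
}
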
src/variables.zig
@@ -108,7 +108,7 @@ fn environBuild() ![:null]?[*:0]u8 {
environ[index] = str[0 .. str.len - 1 :0];
index += 1;
}
var last = @as(*?[*:0]u8, &environ[index]);
const last = @as(*?[*:0]u8, &environ[index]);
last.* = null;
environ_dirty = false;
return environ;
@@ -120,7 +120,7 @@ pub fn henviron() [:null]?[*:0]u8 {
}
 
pub fn getKind(k: []const u8, comptime G: Kind) ?std.meta.FieldType(Var, G) {
var vs = variables[@intFromEnum(G)].get(k) orelse return null;
const vs = variables[@intFromEnum(G)].get(k) orelse return null;
return switch (G) {
.nos => vs.nos,
.sysenv => vs.sysenv,
@@ -142,7 +142,7 @@ pub fn getStr(k: []const u8) ?[]const u8 {
 
pub fn putKind(k: []const u8, v: []const u8, comptime G: Kind) !void {
var vs = &variables[@intFromEnum(G)];
var ret = switch (G) {
const ret = switch (G) {
.nos => vs.put(k, Var{ .nos = v }),
.sysenv => vs.put(k, Var{ .sysenv = .{ .value = v } }),
.internal => vs.put(k, Var{ .internal = .{ .str = v } }),
@@ -194,14 +194,14 @@ pub fn raze() void {
}
 
test "variables standard usage" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
init(a);
defer raze();
 
try put("key", "value");
 
var str = getStr("key").?;
const str = getStr("key").?;
try std.testing.expectEqualStrings("value", str);
 
var x = get("key").?;
@@ -216,17 +216,17 @@ test "variables standard usage" {
}
 
test "variables ephemeral" {
var a = std.testing.allocator;
const a = std.testing.allocator;
 
init(a);
defer raze();
 
try putKind("key", "value", .ephemeral);
 
var str = getKind("key", .ephemeral).?;
const str = getKind("key", .ephemeral).?;
try std.testing.expectEqualStrings("value", str);
razeEphemeral();
 
var n = getKind("key", .ephemeral);
const n = getKind("key", .ephemeral);
try std.testing.expect(n == null);
}