Gregory Mullen, commit ac6888a0 (parent b0630432): 83 lines added, 68 removed
rename token type to kind

build.zig
@@ -13,9 +13,11 @@ pub fn build(b: *std.Build) void {
         .optimize = optimize,
     });
 
-    exe.addModule("log", b.createModule(.{
+    const log = b.createModule(.{
         .source_file = .{ .path = "src/log.zig" },
-    }));
+    });
+
+    exe.addModule("log", log);
 
     b.installArtifact(exe);
 
@@ -33,8 +35,9 @@ pub fn build(b: *std.Build) void {
         .target = target,
         .optimize = optimize,
     });
 
+    unit_tests.addModule("log", log);
     const run_tests = b.addRunArtifact(unit_tests);
 
     const test_step = b.step("test", "Run unit tests");
     test_step.dependOn(&run_tests.step);
 }
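The point of the build.zig change is to hoist the log module into a named const so the same module instance can be attached to both the executable and the test binary. For context, a minimal sketch of the whole build function after this commit; the executable name and root source path are assumptions, while the module wiring mirrors the diff:

    // Sketch only: "hsh" and "src/main.zig" are guesses, not from the diff.
    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        const exe = b.addExecutable(.{
            .name = "hsh",
            .root_source_file = .{ .path = "src/main.zig" },
            .target = target,
            .optimize = optimize,
        });

        // One module instance, shared by the executable and the tests.
        const log = b.createModule(.{
            .source_file = .{ .path = "src/log.zig" },
        });
        exe.addModule("log", log);
        b.installArtifact(exe);

        const unit_tests = b.addTest(.{
            .root_source_file = .{ .path = "src/main.zig" },
            .target = target,
            .optimize = optimize,
        });
        unit_tests.addModule("log", log);

        const run_tests = b.addRunArtifact(unit_tests);
        const test_step = b.step("test", "Run unit tests");
        test_step.dependOn(&run_tests.step);
    }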
 
src/builtins.zig
@@ -102,7 +102,7 @@ fn cd(hsh: *HSH, titr: *ParsedIterator) Err!u8 {
 
     _ = titr.first();
     while (titr.next()) |t| {
-        switch (t.type) {
+        switch (t.kind) {
             .String, .Quote, .Var => {
                 std.mem.copy(u8, &path, t.cannon());
                 path_len = t.cannon().len;
 
src/builtins/alias.zig
@@ -60,7 +60,7 @@ pub fn alias(h: *HSH, titr: *ParsedIterator) Err!u8 {
     var value: ?[]const u8 = null;
     var mode: ?[]const u8 = null;
     while (titr.next()) |t| {
-        switch (t.type) {
+        switch (t.kind) {
             .Operator => {},
             else => {
                 if (name) |_| {
 
src/completion.zig
@@ -5,7 +5,7 @@ const HSH = @import("hsh.zig").HSH;
 const IterableDir = std.fs.IterableDir;
 const tokenizer = @import("tokenizer.zig");
 const Token = tokenizer.Token;
-const TokenKind = tokenizer.TokenKind;
+const Kind = tokenizer.Kind;
 
 const Self = @This();
 
@@ -65,7 +65,7 @@ pub const CompSet = struct {
     // actually using most of orig_token is much danger, such UB
    // the pointers contained within are likely already invalid!
    //orig_token: ?*const Token = null,
-    kind: TokenKind = undefined,
+    kind: Kind = undefined,
 
     /// true when there's a known completion [or the original]
     pub fn known(self: *CompSet) bool {
@@ -173,7 +173,7 @@ fn completePath(h: *HSH, target: []const u8) !void {
 /// called again.
 pub fn complete(hsh: *HSH, t: *const Token) !*CompSet {
     compset.raze();
-    compset.kind = t.type;
+    compset.kind = t.kind;
     compset.index = 0;
 
     const full = try compset.alloc.dupe(u8, t.cannon());
@@ -182,7 +182,7 @@ pub fn complete(hsh: *HSH, t: *const Token) !*CompSet {
         .name = full,
         .kind = CompKindE{ .Original = 0 },
     });
-    switch (t.type) {
+    switch (t.kind) {
         .WhiteSpace => try completeDir(&hsh.hfs.dirs.cwd),
         .String => try completeDirBase(&hsh.hfs.dirs.cwd, t.cannon()),
         .Path => try completePath(hsh, t.cannon()),
 
src/exec.zig
@@ -145,7 +145,7 @@ fn mkCallableStack(h: *HSH, itr: *TokenIterator) Error![]CallableStack {
         //defer h.alloc.free(eslice);
 
         var io: ?StdIo = null;
-        switch (peek.type) {
+        switch (peek.kind) {
            .IoRedir => {
                if (!std.mem.eql(u8, "|", peek.cannon())) unreachable;
                const pipe = std.os.pipe2(0) catch return Error.OSErr;
@@ -160,7 +160,7 @@ fn mkCallableStack(h: *HSH, itr: *TokenIterator) Error![]CallableStack {
 
         var parsed = Parser.parse(&h.alloc, eslice, false) catch unreachable;
         stack.append(CallableStack{
-            .callable = switch (parsed.peek().?.type) {
+            .callable = switch (parsed.peek().?.kind) {
                 .Builtin => Callable{ .builtin = try mkBuiltin(h, parsed) },
                 else => Callable{ .exec = try mkExec(h, parsed) },
             },
@@ -183,11 +183,7 @@ fn execBuiltin(h: *HSH, b: *Builtin) Error!u8 {
 
 fn execBin(e: Binary) Error!void {
     // TODO manage env
-    const res = std.os.execveZ(
-        e.arg,
-        e.argv,
-        @ptrCast([*:null]?[*:0]u8, std.os.environ),
-    );
+    const res = std.os.execveZ(e.arg, e.argv, @ptrCast([*:null]?[*:0]u8, std.os.environ));
     switch (res) {
         error.FileNotFound => {
             // we validate exes internally now this should be impossible
@@ -330,7 +326,7 @@ test "c memory" {
     try std.testing.expect(mem.eql(u8, tkn.tokens.items[0].raw, "ls"));
     try std.testing.expect(mem.eql(u8, tkn.tokens.items[0].cannon(), "ls"));
     for (tkn.tokens.items) |token| {
-        if (token.type == .WhiteSpace) continue;
+        if (token.kind == .WhiteSpace) continue;
         var arg = a.alloc(u8, token.cannon().len + 1) catch unreachable;
         mem.copy(u8, arg, token.cannon());
         arg[token.cannon().len] = 0;
 
src/hsh.zig
@@ -112,7 +112,7 @@ fn initHSH(hsh: *HSH) !void {
         tokenizer.consumes(line) catch continue;
         _ = tokenizer.tokenize() catch continue;
         var titr = Parser.parse(&a, tokenizer.tokens.items, false) catch continue;
-        if (titr.first().type != .Builtin) continue;
+        if (titr.first().kind != .Builtin) continue;
 
         const bi_func = bi.strExec(titr.first().cannon());
         titr.restart();
 
src/parse.zig
@@ -56,10 +56,10 @@ pub const ParsedIterator = struct {
 
         if (self.subtokens) |_| return self.nextSubtoken(token);
 
-        if (i == 0 and token.type == .String) {
+        if (i == 0 and token.kind == .String) {
             if (self.nextSubtoken(token)) |tk| return tk;
             return token;
-        } else if (token.type == .WhiteSpace and !self.ws) {
+        } else if (token.kind == .WhiteSpace and !self.ws) {
             self.index.? += 1;
             return self.next();
         }
@@ -163,7 +163,7 @@ pub const Parser = struct {
     fn parseToken(a: *Allocator, token: *Token) Error!*Token {
         if (token.raw.len == 0) return token;
 
-        switch (token.type) {
+        switch (token.kind) {
             .Quote => {
                 var needle = [2]u8{ '\\', token.subtoken };
                 if (mem.indexOfScalar(u8, token.raw, '\\')) |_| {} else return token;
@@ -182,7 +182,7 @@ pub const Parser = struct {
             },
             .String => {
                 if (mem.indexOf(u8, token.raw, "/")) |_| {
-                    token.type = .Path;
+                    token.kind = .Path;
                     return token;
                 } else return token;
             },
@@ -210,7 +210,7 @@ pub const Parser = struct {
 
     fn parseBuiltin(tkn: *Token) Error!*Token {
         if (Builtins.exists(tkn.cannon())) {
-            tkn.*.type = .Builtin;
+            tkn.*.kind = .Builtin;
             return tkn;
         }
         return Error.Empty;
@@ -346,9 +346,9 @@ test "iterator ws" {
     // var a = std.testing.allocator;
     //
     // var ts = [_]Token{
-    //     Token{ .type = .Tree, .raw = "ls -la" },
-    //     Token{ .type = .WhiteSpace, .raw = " " },
-    //     Token{ .type = .String, .raw = "src" },
+    //     Token{ .kind = .Tree, .raw = "ls -la" },
+    //     Token{ .kind = .WhiteSpace, .raw = " " },
+    //     Token{ .kind = .String, .raw = "src" },
     // };
     //
     // var itr = try Parser.parse(&a, &ts, false);
@@ -367,7 +367,7 @@ test "iterator alias is builtin" {
     var a = std.testing.allocator;
 
     var ts = [_]Token{
-        Token{ .type = .String, .raw = "alias" },
+        Token{ .kind = .String, .raw = "alias" },
     };
 
     var itr = try Parser.parse(&a, &ts, false);
@@ -378,7 +378,7 @@ test "iterator alias is builtin" {
     try expectEql(i, 1);
     try std.testing.expectEqualStrings("alias", itr.first().cannon());
     try expect(itr.next() == null);
-    try std.testing.expect(itr.first().type == .Builtin);
+    try std.testing.expect(itr.first().kind == .Builtin);
 }
 
 test "iterator aliased" {
@@ -391,9 +391,9 @@ test "iterator aliased" {
     });
 
     var ts = [_]Token{
-        Token{ .type = .String, .raw = "la" },
-        Token{ .type = .WhiteSpace, .raw = " " },
-        Token{ .type = .String, .raw = "src" },
+        Token{ .kind = .String, .raw = "la" },
+        Token{ .kind = .WhiteSpace, .raw = " " },
+        Token{ .kind = .String, .raw = "src" },
     };
 
     var itr = try Parser.parse(&a, &ts, false);
@@ -418,9 +418,9 @@ test "iterator aliased self" {
     });
 
     var ts = [_]Token{
-        Token{ .type = .String, .raw = "ls" },
-        Token{ .type = .WhiteSpace, .raw = " " },
-        Token{ .type = .String, .raw = "src" },
+        Token{ .kind = .String, .raw = "ls" },
+        Token{ .kind = .WhiteSpace, .raw = " " },
+        Token{ .kind = .String, .raw = "src" },
     };
 
     var itr = try Parser.parse(&a, &ts, false);
@@ -451,9 +451,9 @@ test "iterator aliased recurse" {
     });
 
     var ts = [_]Token{
-        Token{ .type = .String, .raw = "la" },
-        Token{ .type = .WhiteSpace, .raw = " " },
-        Token{ .type = .String, .raw = "src" },
+        Token{ .kind = .String, .raw = "la" },
+        Token{ .kind = .WhiteSpace, .raw = " " },
+        Token{ .kind = .String, .raw = "src" },
     };
 
     var itr = try Parser.parse(&a, &ts, false);
 
src/tokenizer.zig
@@ -9,7 +9,7 @@ const CompOption = @import("completion.zig").CompOption;
 
 const breaking_tokens = " \t\"'`${|><#;:";
 
-pub const TokenKind = enum(u8) {
+pub const Kind = enum(u8) {
     WhiteSpace,
     String,
     Builtin,
@@ -22,6 +22,21 @@ pub const TokenKind = enum(u8) {
     Aliased,
 };
 
+pub const IOKind = enum {
+    Pipe,
+    In,
+    HDoc,
+    Out,
+    Append,
+    Err,
+};
+
+pub const KindExt = union(enum) {
+    nos: void,
+    word: void,
+    io: IOKind,
+};
+
 pub const Error = error{
     Unknown,
     Memory,
@@ -69,7 +84,7 @@ pub const TokenIterator = struct {
     pub fn next(self: *Self) ?*const Token {
         const n = self.nextAny();
 
-        if (n != null and n.?.type == .WhiteSpace) {
+        if (n != null and n.?.kind == .WhiteSpace) {
            return self.next();
        }
        return n;
@@ -83,7 +98,7 @@ pub const TokenIterator = struct {
 
         const t_ = self.next();
         if (t_) |t| {
-            switch (t.type) {
+            switch (t.kind) {
                 .IoRedir, .ExecDelim => {
                     self.index.? -= t.raw.len;
                     return null;
@@ -123,7 +138,7 @@ pub const TokenIterator = struct {
             if (self.nextExec()) |n| {
                 try list.append(n.*);
             } else if (self.next()) |n| {
-                if (n.type != .IoRedir and n.type != .ExecDelim) {
+                if (n.kind != .IoRedir and n.kind != .ExecDelim) {
                     try list.append(n.*);
                 }
             }
@@ -155,7 +170,8 @@ pub const Token = struct {
     raw: []const u8, // "full" Slice, you probably want to use cannon()
     i: u16 = 0,
     backing: ?ArrayList(u8) = null,
-    type: TokenKind,
+    kind: Kind,
+    extrakind: KindExt = .nos,
     parsed: bool = false,
     subtoken: u8 = 0,
     // I hate this but I've spent too much time on this already #YOLO
@@ -164,14 +180,14 @@ pub const Token = struct {
     pub fn format(self: Token, comptime fmt: []const u8, _: std.fmt.FormatOptions, out: anytype) !void {
         // this is what net.zig does, so it's what I do
         if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
-        try std.fmt.format(out, "Token({}){{{s}}}", .{ self.type, self.raw });
+        try std.fmt.format(out, "Token({}){{{s}}}", .{ self.kind, self.raw });
     }
 
     pub fn cannon(self: Token) []const u8 {
         if (self.backing) |b| return b.items;
         //if (self.resolved) |r| return r;
 
-        return switch (self.type) {
+        return switch (self.kind) {
             .Quote => return self.raw[1 .. self.raw.len - 1],
             else => self.raw,
         };
@@ -189,9 +205,9 @@ pub const Token = struct {
     }
 };
 
-pub fn tokenPos(comptime t: TokenKind, hs: []const Token) ?usize {
+pub fn tokenPos(comptime t: Kind, hs: []const Token) ?usize {
     for (hs, 0..) |tk, i| {
-        if (t == tk.type) return i;
+        if (t == tk.kind) return i;
     }
     return null;
 }
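tokenPos is a linear scan that returns the index of the first token of the given comptime Kind, or null when none matches. A hedged usage sketch; the hasRedirection helper is hypothetical, not part of this commit:

    const tokenizer = @import("tokenizer.zig");

    // Hypothetical call site: check a tokenized line for any redirection.
    // `tokens` stands in for Tokenizer.tokens.items after tokenize().
    fn hasRedirection(tokens: []const tokenizer.Token) bool {
        return tokenizer.tokenPos(.IoRedir, tokens) != null;
    }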
@@ -303,23 +319,23 @@ pub const Tokenizer = struct {
         } else end += 1;
         return Token{
             .raw = src[0..end],
-            .type = TokenKind.String,
+            .kind = Kind.String,
         };
     }
 
     fn ioredir(src: []const u8) Error!Token {
         switch (src[0]) {
-            '|' => return Token{ .raw = src[0..1], .type = .IoRedir },
+            '|' => return Token{ .raw = src[0..1], .kind = .IoRedir },
             '<' => {
                 return Token{
                     .raw = if (src.len > 1 and src[1] == '<') src[0..2] else src[0..1],
-                    .type = .IoRedir,
+                    .kind = .IoRedir,
                 };
             },
             '>' => {
                 return Token{
                     .raw = if (src.len > 1 and src[1] == '>') src[0..2] else src[0..1],
-                    .type = .IoRedir,
+                    .kind = .IoRedir,
                 };
             },
             else => return Error.InvalidSrc,
@@ -330,11 +346,11 @@ pub const Tokenizer = struct {
         switch (src[0]) {
             ';' => return Token{
                 .raw = src[0..1],
-                .type = .ExecDelim,
+                .kind = .ExecDelim,
             },
             '&' => return Token{
                 .raw = src[0..1],
-                .type = .ExecDelim,
+                .kind = .ExecDelim,
             },
             else => return Error.InvalidSrc,
         }
@@ -344,7 +360,7 @@ pub const Tokenizer = struct {
         switch (src[0]) {
             '=' => return Token{
                 .raw = src[0..1],
-                .type = .Operator,
+                .kind = .Operator,
             },
             else => return Error.InvalidSrc,
         }
@@ -368,7 +384,7 @@ pub const Tokenizer = struct {
 
         return Token{
             .raw = src[0..end],
-            .type = TokenKind.Quote,
+            .kind = Kind.Quote,
             .subtoken = subt,
         };
     }
@@ -381,20 +397,20 @@ pub const Tokenizer = struct {
         }
         return Token{
             .raw = src[0..end],
-            .type = TokenKind.WhiteSpace,
+            .kind = Kind.WhiteSpace,
         };
     }
 
     fn path(src: []const u8) Error!Token {
         var t = try Tokenizer.string(src);
-        t.type = TokenKind.Path;
+        t.kind = Kind.Path;
         return t;
     }
 
     pub fn dump_tokens(self: Tokenizer, ws: bool) !void {
         std.debug.print("\n", .{});
         for (self.tokens.items) |i| {
-            if (!ws and i.type == .WhiteSpace) continue;
+            if (!ws and i.kind == .WhiteSpace) continue;
             std.debug.print("{}\n", .{i});
         }
     }
@@ -415,7 +431,7 @@ pub const Tokenizer = struct {
             sum += t.raw.len;
         }
         self.c_idx = sum + old.raw.len;
-        if (old.type != .WhiteSpace) try self.popRange(old.raw.len);
+        if (old.kind != .WhiteSpace) try self.popRange(old.raw.len);
         if (new.kind == .Original and mem.eql(u8, new.full, " ")) return;
 
         try self.consumeSafeish(new.full);
@@ -692,7 +708,7 @@ test "tokenize path" {
     _ = try t.tokenize();
     try expectEql(t.raw.items.len, "blerg ~/dir".len);
     try expectEql(t.tokens.items.len, 3);
-    try expect(t.tokens.items[2].type == TokenKind.Path);
+    try expect(t.tokens.items[2].kind == Kind.Path);
     try expect(eql(u8, t.tokens.items[2].raw, "~/dir"));
     t.reset();
@@ -700,7 +716,7 @@ test "tokenize path" {
     _ = try t.tokenize();
     try expectEql(t.raw.items.len, "blerg /home/user/something".len);
     try expectEql(t.tokens.items.len, 3);
-    try expect(t.tokens.items[2].type == TokenKind.Path);
+    try expect(t.tokens.items[2].kind == Kind.Path);
     try expect(eql(u8, t.tokens.items[2].raw, "/home/user/something"));
 }