srctree

Gregory Mullen parent e3c256a4 34f4f13e
major refactor of data ownership

.gitignore added: 542, removed: 605, total 0
@@ -1,4 +1,4 @@
zig-cache
.zig-cache
zig-out
test.zig
 
 
src/builtins.zig added: 542, removed: 605, total 0
@@ -204,7 +204,6 @@ fn exit(hsh: *HSH, i: *ParsedIterator) Err!u8 {
}
hsh.draw.raze();
hsh.tty.raze();
hsh.tkn.raze();
hsh.raze();
std.posix.exit(code);
}
 
src/builtins/alias.zig added: 542, removed: 605, total 0
@@ -1,8 +1,7 @@
const std = @import("std");
const hsh = @import("../hsh.zig");
const HSH = hsh.HSH;
const tokenizer = @import("../tokenizer.zig");
const Token = tokenizer.Token;
const Token = @import("../token.zig");
const bi = @import("../builtins.zig");
const Err = bi.Err;
const Parse = @import("../parse.zig");
@@ -158,7 +157,7 @@ test "save" {
defer raze(a);
const str = "alias haxzor='ssh 127.0.0.1 \"echo hsh was here | sudo tee /root/.lmao.txt\"'";
 
var itr = tokenizer.TokenIterator{ .raw = str };
var itr = Token.Iterator{ .raw = str };
const slice = try itr.toSliceExec(a);
defer a.free(slice);
var pitr = try Parse.Parser.parse(a, slice);
 
src/completion.zig added: 542, removed: 605, total 0
@@ -207,7 +207,7 @@ pub const CompSet = struct {
//orig_token: ?*const Token = null,
kind: Token.Kind = .nos,
err: bool = false,
draw_cache: [flavors_len]?[]Draw.LexTree = .{null} ** 3,
draw_cache: [flavors_len]?[]Draw.Lexeme = .{null} ** 3,
 
/// Intentionally excludes original from the count
pub fn count(self: *const CompSet) usize {
@@ -387,8 +387,9 @@ pub const CompSet = struct {
if (err == Draw.Layout.Error.ItemCount) {
var fbuf: [128]u8 = undefined;
const str = try std.fmt.bufPrint(&fbuf, ERRSTR_TOOBIG, .{self.count()});
try Draw.drawAfter(d, Draw.LexTree{
.lex = Draw.Lexeme{ .char = str, .style = .{ .attr = .bold, .fg = .red } },
try Draw.drawAfter(d, Draw.Lexeme{
.char = str,
.style = .{ .attr = .bold, .fg = .red },
});
self.err = true;
return err;
@@ -400,15 +401,17 @@ pub const CompSet = struct {
if (self.err) {
var fbuf: [128]u8 = undefined;
const str = try std.fmt.bufPrint(&fbuf, ERRSTR_TOOBIG, .{self.count()});
try Draw.drawAfter(d, Draw.LexTree{
.lex = Draw.Lexeme{ .char = str, .style = .{ .attr = .bold, .fg = .red } },
});
try Draw.drawAfter(
d,
Draw.Lexeme{ .char = str, .style = .{ .attr = .bold, .fg = .red } },
);
return;
}
if (self.count() == 0) {
try Draw.drawAfter(d, Draw.LexTree{
.lex = Draw.Lexeme{ .char = ERRSTR_NOOPTS, .style = .{ .attr = .bold, .fg = .red } },
});
try Draw.drawAfter(
d,
Draw.Lexeme{ .char = ERRSTR_NOOPTS, .style = .{ .attr = .bold, .fg = .red } },
);
return;
}
 
 
src/draw.zig added: 542, removed: 605, total 0
@@ -11,12 +11,24 @@ pub const Layout = @import("draw/layout.zig");
 
const DrawBuf = ArrayList(u8);
 
alloc: Allocator,
tty: *TTY,
hsh: *HSH,
cursor: u32 = 0,
cursor_reposition: bool = true,
before: DrawBuf = undefined,
b: DrawBuf = undefined,
right: DrawBuf = undefined,
after: DrawBuf = undefined,
term_size: Cord = .{},
lines: u16 = 0,
 
pub const Cord = struct {
x: isize = 0,
y: isize = 0,
};
 
pub const Err = error{
pub const Error = error{
Unknown,
OutOfMemory,
WriterIO,
@@ -52,23 +64,22 @@ pub const Color = enum {
green,
};
 
pub const Style = struct {
attr: ?Attr = null,
fg: ?Color = null,
bg: ?Color = null,
};
 
pub const Lexeme = struct {
char: []const u8,
style: Style = .{},
};
padding: ?Padding = null,
style: ?Style = null,
 
const LexSibling = []Lexeme;
pub const Style = struct {
attr: ?Attr = null,
fg: ?Color = null,
bg: ?Color = null,
};
 
pub const LexTree = union(enum) {
lex: Lexeme,
siblings: LexSibling,
children: []LexTree,
pub const Padding = struct {
char: u8 = ' ',
left: i32 = 0,
right: i32 = 0,
};
};
 
var colorize: bool = true;
@@ -84,19 +95,7 @@ const Direction = enum {
 
pub const Drawable = @This();
 
alloc: Allocator,
tty: *TTY,
hsh: *HSH,
cursor: u32 = 0,
cursor_reposition: bool = true,
before: DrawBuf = undefined,
b: DrawBuf = undefined,
right: DrawBuf = undefined,
after: DrawBuf = undefined,
term_size: Cord = .{},
lines: u16 = 0,
 
pub fn init(hsh: *HSH) Err!Drawable {
pub fn init(hsh: *HSH) Error!Drawable {
colorize = hsh.enabled(Features.Colorize);
return .{
.alloc = hsh.alloc,
@@ -109,12 +108,12 @@ pub fn init(hsh: *HSH) Err!Drawable {
};
}
 
pub fn key(d: *Drawable, c: u8) Err!void {
_ = d.tty.out.write(&[1]u8{c}) catch return Err.WriterIO;
pub fn key(d: *Drawable, c: u8) Error!void {
_ = d.tty.out.write(&[1]u8{c}) catch return Error.WriterIO;
}
 
pub fn write(d: *Drawable, out: []const u8) Err!usize {
return d.tty.out.write(out) catch Err.WriterIO;
pub fn write(d: *Drawable, out: []const u8) Error!usize {
return d.tty.out.write(out) catch Error.WriterIO;
}
 
pub fn move(_: *Drawable, comptime dir: Direction, count: u16) []const u8 {
@@ -150,7 +149,7 @@ pub fn raze(d: *Drawable) void {
d.b.clearAndFree();
}
 
fn setAttr(buf: *DrawBuf, attr: ?Attr) Err!void {
fn setAttr(buf: *DrawBuf, attr: ?Attr) Error!void {
if (attr) |a| {
switch (a) {
.bold => try buf.appendSlice("\x1B[1m"),
@@ -163,7 +162,7 @@ fn setAttr(buf: *DrawBuf, attr: ?Attr) Err!void {
}
}
 
fn bgColor(buf: *DrawBuf, c: ?Color) Err!void {
fn bgColor(buf: *DrawBuf, c: ?Color) Error!void {
if (c) |bg| {
const color = switch (bg) {
.red => "\x1B[41m",
@@ -175,7 +174,7 @@ fn bgColor(buf: *DrawBuf, c: ?Color) Err!void {
}
}
 
fn fgColor(buf: *DrawBuf, c: ?Color) Err!void {
fn fgColor(buf: *DrawBuf, c: ?Color) Error!void {
if (c) |fg| {
const color = switch (fg) {
.red => "\x1B[31m",
@@ -187,40 +186,32 @@ fn fgColor(buf: *DrawBuf, c: ?Color) Err!void {
}
}
 
fn drawLexeme(buf: *DrawBuf, x: usize, y: usize, l: Lexeme) Err!void {
fn drawLexeme(buf: *DrawBuf, _: usize, _: usize, l: Lexeme) Error!void {
if (l.char.len == 0) return;
_ = x;
_ = y;
if (colorize) {
try setAttr(buf, l.style.attr);
try fgColor(buf, l.style.fg);
try bgColor(buf, l.style.bg);
if (l.style) |style| {
try setAttr(buf, style.attr);
try fgColor(buf, style.fg);
try bgColor(buf, style.bg);
}
}
try buf.appendSlice(l.char);
if (colorize) {
if (colorize and l.style != null) {
try bgColor(buf, .none);
try fgColor(buf, .none);
try setAttr(buf, .reset);
}
}
 
fn drawSibling(buf: *DrawBuf, x: usize, y: usize, s: []Lexeme) Err!void {
fn drawLexemeMany(buf: *DrawBuf, x: usize, y: usize, s: []const Lexeme) Error!void {
for (s) |sib| {
try drawLexeme(buf, x, y, sib);
}
}
 
fn drawTree(buf: *DrawBuf, x: usize, y: usize, t: LexTree) Err!void {
return switch (t) {
LexTree.lex => |lex| drawLexeme(buf, x, y, lex),
LexTree.siblings => |sib| drawSibling(buf, x, y, sib),
LexTree.children => |child| drawTrees(buf, x, y, child),
};
}
 
fn drawTrees(buf: *DrawBuf, x: usize, y: usize, tree: []LexTree) Err!void {
for (tree) |t| {
try drawTree(buf, x, y, t);
fn drawLexemeTree(buf: *DrawBuf, x: usize, y: usize, t: []const []const Lexeme) Error!void {
for (t) |set| {
drawLexemeMany(buf, x, y, set);
}
}
 
@@ -228,29 +219,29 @@ fn countLines(buf: []const u8) u16 {
return @truncate(std.mem.count(u8, buf, "\n"));
}
 
pub fn drawBefore(d: *Drawable, t: LexTree) !void {
try drawTree(&d.before, 0, 0, t);
pub fn drawBefore(d: *Drawable, t: []const Lexeme) !void {
try drawLexemeMany(&d.before, 0, 0, t);
try d.before.appendSlice("\x1B[K");
}
 
pub fn drawAfter(d: *Drawable, t: LexTree) !void {
pub fn drawAfter(d: *Drawable, t: []const Lexeme) !void {
try d.after.append('\n');
try drawTree(&d.after, 0, 0, t);
try drawLexemeMany(&d.after, 0, 0, t);
}
 
pub fn drawRight(d: *Drawable, tree: LexTree) !void {
try drawTree(&d.right, 0, 0, tree);
pub fn drawRight(d: *Drawable, tree: []const Lexeme) !void {
try drawLexemeMany(&d.right, 0, 0, tree);
}
 
pub fn draw(d: *Drawable, tree: LexTree) !void {
try drawTree(&d.b, 0, 0, tree);
pub fn draw(d: *Drawable, tree: []const Lexeme) !void {
try drawLexemeMany(&d.b, 0, 0, tree);
}
 
/// Renders the "prompt" line
/// hsh is based around the idea of user keyboard-driven input, so plugin should
/// provide the context, expecting not to know about, or touch the final user
/// input line
pub fn render(d: *Drawable) Err!void {
pub fn render(d: *Drawable) Error!void {
_ = try d.write("\r");
_ = try d.write(d.move(.Up, d.lines));
d.lines = 0;
@@ -299,7 +290,7 @@ pub fn clearCtx(d: *Drawable) void {
/// prompt before exec.
pub fn clear_before_exec(_: *Drawable) void {}
 
pub fn newLine(d: *Drawable) Err!void {
pub fn newLine(d: *Drawable) Error!void {
_ = try d.write("\n");
}
 
 
src/draw/layout.zig added: 542, removed: 605, total 0
@@ -1,16 +1,14 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const draw = @import("../draw.zig");
const Cord = draw.Cord;
const LexTree = draw.LexTree;
const Lexeme = draw.Lexeme;
const Draw = @import("../draw.zig");
const Cord = Draw.Cord;
const Lexeme = Draw.Lexeme;
const dupePadded = @import("../mem.zig").dupePadded;
 
pub const Error = error{
pub const Error = Draw.Error || error{
ViewportFit,
ItemCount,
LayoutUnable,
Memory,
};
 
/// TODO unicode support when?
@@ -72,36 +70,40 @@ fn maxWidthLexem(lexs: []const Lexeme) u32 {
return max + 1;
}
 
/// Caller owns memory, strings **are** duplicated
/// *LexTree
/// LexTree.siblings.Lexem,
/// LexTree.siblings.Lexem[..].char must all be free'd
pub fn grid(a: Allocator, items: []const []const u8, wh: Cord) Error![]LexTree {
/// items aren't duplicated and must outlive Lexeme based grid
pub fn grid(a: Allocator, items: []const []const u8, wh: Cord) Error![][]Lexeme {
// errdefer
const largest = maxWidth(items);
if (largest > wh.x) return Error.ViewportFit;
 
const cols: u32 = @max(@as(u32, @intCast(wh.x)) / largest, 1);
const remainder: u32 = if (items.len % cols > 0) 1 else 0;
const rows: u32 = @as(u32, @truncate(items.len)) / cols + remainder;
const stride: usize = @max(@divFloor(wh.x, largest), 1);
const full_count: usize = items.len / stride;
const remainder: usize = items.len % stride;
const row_count: usize = full_count + @as(usize, if (remainder != 0) 1 else 0);
 
const rows = try a.alloc([]Lexeme, row_count);
 
var trees = a.alloc(LexTree, rows) catch return Error.Memory;
var lexes = a.alloc(Lexeme, items.len) catch return Error.Memory;
// errdefer
 
for (0..rows) |row| {
trees[row] = LexTree{
.siblings = lexes[row * cols .. @min((row + 1) * cols, items.len)],
};
for (0..cols) |col| {
if (items.len <= cols * row + col) break;
trees[row].siblings[col] = Lexeme{
.char = dupePadded(a, items[row * cols + col], largest) catch return Error.Memory,
var i: usize = 0;
root: for (rows) |*row| {
row.* = if (i < stride * full_count)
try a.alloc(Lexeme, stride)
else
try a.alloc(Lexeme, remainder);
 
for (row.*) |*col| {
const char = items[i];
col.* = Lexeme{
.char = char,
.padding = .{ .right = @intCast(largest - countPrintable(char)) },
};
i += 1;
if (i >= items.len) break :root;
}
}
 
return trees;
return rows;
}
 
fn sum(cs: []u16) u32 {
@@ -110,8 +112,8 @@ fn sum(cs: []u16) u32 {
return total;
}
 
pub fn tableSize(a: Allocator, items: []Lexeme, wh: Cord) Error![]u16 {
var colsize: []u16 = a.alloc(u16, 0) catch return Error.Memory;
pub fn tableSize(a: Allocator, items: []const Lexeme, wh: Cord) Error![]u16 {
var colsize: []u16 = try a.alloc(u16, 0);
errdefer a.free(colsize);
 
var cols = items.len;
@@ -121,7 +123,7 @@ pub fn tableSize(a: Allocator, items: []Lexeme, wh: Cord) Error![]u16 {
if (cols == 0) return Error.LayoutUnable;
if (countLexems(items[0..cols]) > wh.x) continue;
 
colsize = a.realloc(colsize, cols) catch return Error.Memory;
colsize = try a.realloc(colsize, cols);
@memset(colsize, 0);
rows = @as(u32, @truncate(items.len / cols));
if (items.len % cols > 0) rows += 1;
@@ -141,50 +143,33 @@ pub fn tableSize(a: Allocator, items: []Lexeme, wh: Cord) Error![]u16 {
return colsize;
}
 
pub fn table(a: Allocator, items: anytype, wh: Cord) Error![]LexTree {
const T = @TypeOf(items);
const func = comptime switch (T) {
[]Lexeme => tableLexeme,
*[][]const u8 => tableChar,
*const [12][]const u8 => tableChar,
else => unreachable,
};
return func(a, items, wh);
}
 
fn tableLexeme(a: Allocator, items: []Lexeme, wh: Cord) Error![]LexTree {
fn tableLexeme(a: Allocator, items: []const Lexeme, wh: Cord) Error![][]Lexeme {
const largest = maxWidthLexem(items);
if (largest > wh.x) return Error.ViewportFit;
 
const colsz = try tableSize(a, items, wh);
const stride = colsz.len;
defer a.free(colsz);
var rows = (items.len / colsz.len);
if (items.len % colsz.len > 0) rows += 1;
const row_count = std.math.divCeil(usize, items.len, stride) catch unreachable;
const remainder = (items.len % stride) -| 1;
 
var trees = a.alloc(LexTree, rows) catch return Error.Memory;
const rows = try a.alloc([]Lexeme, row_count);
for (rows, 0..) |*dstrow, i| {
const row_num = if (i == row_count - 1) remainder else stride;
 
for (0..rows) |row| {
trees[row] = LexTree{
.siblings = items[row * colsz.len .. @min((row + 1) * colsz.len, items.len)],
};
for (0..colsz.len) |c| {
const rowcol = row * colsz.len + c;
if (rowcol >= items.len) break;
const old = items[rowcol].char;
trees[row].siblings[c].char = dupePadded(a, old, colsz[c]) catch return Error.Memory;
dstrow.* = try a.alloc(Lexeme, row_num);
for (dstrow.*, 0..) |*col, j| {
const offset = i * stride + j;
col.char = try dupePadded(a, items[offset].char, colsz[j]);
}
}
return trees;
return rows;
}
 
/// Caller owns memory, strings **are** duplicated
/// *LexTree
/// LexTree.siblings.Lexem,
/// LexTree.siblings.Lexem[..].char must all be free'd
/// items are not reordered
fn tableChar(a: Allocator, items: []const []const u8, wh: Cord) Error![]LexTree {
const lexes = a.alloc(Lexeme, items.len) catch return Error.Memory;
errdefer a.free(lexes);
fn tableChar(a: Allocator, items: []const []const u8, wh: Cord) Error![][]Lexeme {
const lexes = try a.alloc(Lexeme, items.len);
defer a.free(lexes);
 
for (items, lexes) |i, *l| {
l.*.char = i;
@@ -218,66 +203,48 @@ const strs13 = strs12 ++ [_][]const u8{"extra4luck"};
 
test "table" {
var a = std.testing.allocator;
const err = table(a, &strs12, Cord{ .x = 50, .y = 1 });
const err = tableChar(a, strs12[0..], Cord{ .x = 50, .y = 1 });
try std.testing.expectError(Error.ItemCount, err);
 
const rows = try table(a, &strs12, Cord{ .x = 50, .y = 5 });
const rows = try tableChar(a, strs12[0..], Cord{ .x = 50, .y = 5 });
//std.debug.print("rows {any}\n", .{rows});
for (rows) |row| {
//std.debug.print(" row {any}\n", .{row});
for (row.siblings) |sib| {
for (row) |col| {
//std.debug.print(" sib {s}\n", .{sib.char});
a.free(sib.char);
a.free(col.char);
}
}
 
try std.testing.expect(rows.len == 3);
try std.testing.expect(rows[0].siblings.len == 4);
try std.testing.expect(rows[0].len == 4);
 
// I have my good ol' C pointers back... this is so nice :)
a.free(@as(*[strs12.len]Lexeme, @ptrCast(rows[0].siblings)));
for (rows) |row| a.free(row);
a.free(rows);
}
 
test "grid 3*4" {
var a = std.testing.allocator;
 
const rows = try grid(a, &strs12, Cord{ .x = 50, .y = 1 });
const rows = try grid(a, strs12[0..], Cord{ .x = 50, .y = 1 });
//std.debug.print("rows {any}\n", .{rows});
for (rows) |row| {
//std.debug.print(" row {any}\n", .{row});
for (row.siblings) |sib| {
//std.debug.print(" sib {s}\n", .{sib.char});
a.free(sib.char);
}
}
 
try std.testing.expect(rows.len == 4);
try std.testing.expect(rows[0].siblings.len == 3);
try std.testing.expect(rows[3].siblings.len == 3);
try std.testing.expect(rows[0].len == 3);
try std.testing.expect(rows[3].len == 3);
 
// I have my good ol' C pointers back... this is so nice :)
a.free(@as(*[strs12.len]Lexeme, @ptrCast(rows[0].siblings)));
for (rows) |row| a.free(row);
a.free(rows);
}
 
test "grid 3*4 + 1" {
var a = std.testing.allocator;
const rows = try grid(a, &strs13, Cord{ .x = 50, .y = 1 });
//std.debug.print("rows {any}\n", .{rows});
for (rows) |row| {
//std.debug.print(" row {any}\n", .{row});
for (row.siblings) |sib| {
//std.debug.print(" sib {s}\n", .{sib.char});
a.free(sib.char);
}
}
 
try std.testing.expectEqual(rows.len, 5);
try std.testing.expect(rows[0].siblings.len == 3);
try std.testing.expect(rows[4].siblings.len == 1);
try std.testing.expect(rows[0].len == 3);
try std.testing.expect(rows[4].len == 1);
 
// I have my good ol' C pointers back... this is so nice :)
a.free(@as(*[strs13.len]Lexeme, @ptrCast(rows[0].siblings)));
for (rows) |row| a.free(row);
a.free(rows);
}
 
src/exec.zig added: 542, removed: 605, total 0
@@ -87,7 +87,7 @@ pub fn execFromInput(h: *HSH, str: []const u8) ![]u8 {
var itr = TokenIterator{ .raw = str };
const tokens = try itr.toSlice(h.alloc);
defer h.alloc.free(tokens);
var ps = try Parser.parse(h.tkn.alloc, tokens);
var ps = try Parser.parse(h.alloc, tokens);
defer ps.raze();
return h.alloc.dupe(u8, ps.first().cannon());
}
 
src/hsh.zig added: 542, removed: 605, total 0
@@ -2,10 +2,10 @@ const std = @import("std");
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
const Drawable = @import("draw.zig").Drawable;
const Tokenizer = @import("tokenizer.zig").Tokenizer;
const TTY = @import("tty.zig").TTY;
const builtin = @import("builtin");
const ArrayList = std.ArrayList;
const Token = @import("token.zig");
const Signals = @import("signals.zig");
const Queue = std.atomic.Queue;
const jobs = @import("jobs.zig");
@@ -84,8 +84,6 @@ fn readFromRC(hsh: *HSH) E!void {
const r = rc_.reader();
var a = hsh.alloc;
 
var tokenizer = Tokenizer.init(a);
defer tokenizer.raze();
rc_.seekTo(0) catch return E.FSError;
while (readLine(&a, r)) |line| {
defer a.free(line);
@@ -95,9 +93,7 @@ fn readFromRC(hsh: *HSH) E!void {
if (line.len > 0 and line[0] == '#') {
continue;
}
defer tokenizer.reset();
tokenizer.consumes(line) catch return E.Memory;
var titr = tokenizer.iterator();
var titr = Token.Iterator{ .raw = line };
const tokens = titr.toSlice(a) catch return E.Memory;
defer a.free(tokens);
var pitr = Parser.parse(a, tokens) catch continue;
@@ -189,7 +185,6 @@ pub const HSH = struct {
jobs: *jobs.Jobs,
tty: TTY = undefined,
draw: Drawable = undefined,
tkn: Tokenizer = undefined,
line: *Line = undefined,
input: i32 = 0,
changes: []u8 = undefined,
@@ -213,7 +208,6 @@ pub const HSH = struct {
.env = env,
.pid = std.os.linux.getpid(),
.jobs = jobs.init(a),
.tkn = Tokenizer.init(a),
.hfs = hfs,
};
 
@@ -239,7 +233,6 @@ pub const HSH = struct {
hsh.env.deinit();
jobs.raze(hsh.alloc);
hsh.hfs.raze(hsh.alloc);
hsh.tkn.raze();
}
 
fn sleep(_: *HSH) void {
 
src/line.zig added: 542, removed: 605, total 0
@@ -1,4 +1,5 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const log = @import("log");
 
const fs = @import("fs.zig");
@@ -14,9 +15,7 @@ const Draw = @import("draw.zig");
 
const Line = @This();
 
// delete plx
const tokenizer = @import("tokenizer.zig");
const Tokenizer = tokenizer.Tokenizer;
const Tokenizer = @import("tokenizer.zig");
 
const Mode = enum {
TYPING,
@@ -26,7 +25,9 @@ const Mode = enum {
};
 
hsh: *HSH,
alloc: Allocator,
input: Input,
tkn: Tokenizer,
options: Options,
mode: union(enum) {
interactive: void,
@@ -43,13 +44,15 @@ pub const Options = struct {
interactive: bool = true,
};
 
pub fn init(hsh: *HSH, options: Options) !Line {
pub fn init(hsh: *HSH, a: Allocator, options: Options) !Line {
return .{
.hsh = hsh,
.alloc = a,
.input = .{ .stdin = hsh.input, .spin = spin, .hsh = hsh },
.tkn = Tokenizer.init(a),
.completion = try Complete.init(hsh),
.options = options,
.history = History.init(hsh.hfs.history, hsh.alloc),
.input = .{ .stdin = hsh.input, .spin = spin, .hsh = hsh },
.mode = if (options.interactive) .{ .interactive = {} } else .{ .scripted = {} },
.text = try hsh.alloc.alloc(u8, 0),
};
@@ -65,11 +68,15 @@ fn spin(hsh: ?*HSH) bool {
}
 
fn char(line: *Line, c: u8) !void {
try line.hsh.tkn.consumec(c);
try line.tkn.consumec(c);
try line.hsh.draw.key(c);
 
// TODO FIXME
line.text = line.hsh.tkn.raw.items;
line.text = line.tkn.raw.items;
}
 
pub fn peek(line: Line) []const u8 {
return line.tkn.raw.items;
}
 
pub fn do(line: *Line) ![]u8 {
@@ -83,11 +90,10 @@ pub fn do(line: *Line) ![]u8 {
error.signaled => {
Draw.clearCtx(&line.hsh.draw);
try Draw.render(&line.hsh.draw);
return line.hsh.alloc.dupe(u8, "");
return line.alloc.dupe(u8, "");
},
error.end_of_text => {
defer line.hsh.tkn.exec();
return try line.hsh.alloc.dupe(u8, line.hsh.tkn.raw.items);
return try line.alloc.dupe(u8, line.tkn.raw.items);
},
};
////hsh.draw.cursor = 0;
@@ -122,16 +128,17 @@ pub fn do(line: *Line) ![]u8 {
.esc => continue,
.up => line.findHistory(.up),
.down => line.findHistory(.down),
.left => line.hsh.tkn.move(.dec),
.right => line.hsh.tkn.move(.inc),
.backspace => line.hsh.tkn.pop(),
.left => line.tkn.move(.dec),
.right => line.tkn.move(.inc),
.backspace => line.tkn.pop(),
.newline => return try line.dupeText(),
.end_of_text => return try line.dupeText(),
.delete_word => _ = try line.hsh.tkn.dropWord(),
else => log.warn("unknown {}\n", .{ctrl}),
.delete_word => _ = try line.tkn.dropWord(),
.tab => {}, //try line.complete(),
else => |els| log.warn("unknown {}\n", .{els}),
}
line.hsh.draw.clear();
try Prompt.draw(line.hsh);
try Prompt.draw(line.hsh, line.peek());
try line.hsh.draw.render();
},
 
@@ -144,7 +151,7 @@ pub fn do(line: *Line) ![]u8 {
}
 
fn dupeText(line: Line) ![]u8 {
return try line.hsh.alloc.dupe(u8, line.text);
return try line.alloc.dupe(u8, line.text);
}
 
pub fn externEditor(line: *Line) ![]u8 {
@@ -158,12 +165,12 @@ pub fn externEditor(line: *Line) ![]u8 {
pub fn externEditorRead(line: *Line) ![]u8 {
const tmp = line.mode.external_editor;
defer line.mode = .{ .interactive = {} };
defer line.hsh.alloc.free(tmp);
defer line.alloc.free(tmp);
defer std.posix.unlink(tmp) catch unreachable;
 
var file = fs.openFile(tmp, false) orelse return error.io;
defer file.close();
line.text = file.reader().readAllAlloc(line.hsh.alloc, 4096) catch unreachable;
line.text = file.reader().readAllAlloc(line.alloc, 4096) catch unreachable;
return line.text;
}
 
@@ -176,7 +183,7 @@ fn saveLine(_: *Line, _: []const u8) void {
 
fn findHistory(line: *Line, dr: enum { up, down }) void {
var history = line.history;
var tkn = &line.hsh.tkn;
var tkn = &line.tkn;
if (tkn.user_data) {
line.saveLine(tkn.raw.items);
}
@@ -197,7 +204,7 @@ fn findHistory(line: *Line, dr: enum { up, down }) void {
}
}
_ = history.readAtFiltered(tkn.lineReplaceHistory(), &line.usr_line);
line.hsh.tkn.move(.end);
line.tkn.move(.end);
return;
},
.down => {
 
src/log.zig added: 542, removed: 605, total 0
@@ -32,8 +32,8 @@ pub fn hshLogFn(
else => "[ NOS ] ",
};
 
std.debug.getStderrMutex().lock();
defer std.debug.getStderrMutex().unlock();
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr = std.io.getStdErr().writer();
stderr.print(prefix ++ format, args) catch return;
}
 
src/main.zig added: 542, removed: 605, total 0
@@ -20,24 +20,29 @@ test "main" {
std.testing.refAllDecls(@This());
}
 
fn core(hsh: *HSH) ![]u8 {
fn core(hsh: *HSH, a: Allocator) ![]u8 {
var array_alloc = std.heap.ArenaAllocator.init(a);
defer array_alloc.deinit();
const alloc = array_alloc.allocator();
var line = try Line.init(hsh, alloc, .{ .interactive = hsh.tty.is_tty });
 
defer hsh.draw.reset();
//try Context.update(hsh, &[_]Context.Contexts{.git});
 
var redraw = true;
// TODO drop hsh
var line = hsh.line;
 
while (true) {
hsh.draw.clear();
redraw = hsh.spin() or redraw;
 
if (redraw) {
try prompt.draw(hsh);
try prompt.draw(hsh, line.peek());
try Draw.render(&hsh.draw);
redraw = false;
}
 
return try line.do();
return a.dupe(u8, try line.do());
}
}
 
@@ -57,13 +62,12 @@ fn execTacC(args: *std.process.ArgIterator) u8 {
hsh.tty = TTY.init(a) catch return 255;
defer hsh.tty.raze();
 
while (args.next()) |arg| {
hsh.tkn.consumes(arg) catch return 2;
while (args.next()) |_| {
unreachable;
//hsh.tkn.consumes(arg) catch return 2;
}
const str = hsh.alloc.dupe(u8, hsh.tkn.raw.items) catch return 2;
defer hsh.alloc.free(str);
 
Exec.exec(&hsh, str) catch |err| {
if (true) return 0;
Exec.exec(&hsh, undefined) catch |err| {
log.err("-c error [{}]\n", .{err});
return 1;
};
@@ -130,16 +134,13 @@ pub fn main() !void {
// Look at me, I'm the captain now!
hsh.tty.pwnTTY();
 
var line = try Line.init(&hsh, .{ .interactive = hsh.tty.is_tty });
hsh.line = &line;
 
hsh.draw = Drawable.init(&hsh) catch unreachable;
defer hsh.draw.raze();
hsh.draw.term_size = hsh.tty.geom() catch unreachable;
 
var inerr = false;
while (true) {
if (core(&hsh)) |str| {
if (core(&hsh, a)) |str| {
inerr = false;
if (str.len == 0) {
std.debug.print("\n goodbye :) \n", .{});
@@ -154,18 +155,12 @@ pub fn main() !void {
error.ExeNotFound => {
const first = Exec.execFromInput(&hsh, str) catch @panic("memory");
defer hsh.alloc.free(first);
const tree = Draw.LexTree{ .siblings = @constCast(&[_]Draw.Lexeme{
Draw.Lexeme{
.char = "[ Unable to find ",
.style = .{ .attr = .bold, .fg = .red },
},
Draw.Lexeme{
.char = first,
.style = .{ .attr = .bold, .fg = .red },
},
Draw.Lexeme{ .char = " ]", .style = .{ .attr = .bold, .fg = .red } },
}) };
try Draw.drawAfter(&hsh.draw, tree);
const tree = [_]Draw.Lexeme{
.{ .char = "[ Unable to find ", .style = .{ .attr = .bold, .fg = .red } },
.{ .char = first, .style = .{ .attr = .bold, .fg = .red } },
.{ .char = " ]", .style = .{ .attr = .bold, .fg = .red } },
};
try Draw.drawAfter(&hsh.draw, tree[0..]);
try Draw.render(&hsh.draw);
},
error.StdIOError => {
@@ -176,7 +171,6 @@ pub fn main() !void {
unreachable;
},
};
hsh.tkn.exec();
continue;
} else |err| {
switch (err) {
 
src/prompt.zig added: 542, removed: 605, total 0
@@ -39,55 +39,41 @@ fn userTextMultiline(hsh: *HSH, tkn: *Tokenizer) !void {
const err = if (tkn.err_idx > 0) tkn.err_idx else tkn.raw.items.len;
const good = tkn.raw.items[0..err];
const bad = tkn.raw.items[err..];
try Draw.draw(&hsh.draw, LexTree{
.siblings = @constCast(&[_]Lexeme{
try Draw.draw(&hsh.draw, .{
.siblings = &[_]Lexeme{
.{ .char = good },
.{ .char = bad, .style = .{ .bg = .red } },
}),
},
});
}
 
fn userText(hsh: *HSH, tkn: *Tokenizer) !void {
if (std.mem.indexOf(u8, tkn.raw.items, "\n")) |_| return userTextMultiline(hsh, tkn);
fn userText(hsh: *HSH, good: []const u8, bad: []const u8) !void {
//if (std.mem.indexOf(u8, tkn.raw.items, "\n")) |_| return userTextMultiline(hsh, tkn);
 
const err = if (tkn.err_idx > 0) tkn.err_idx else tkn.raw.items.len;
const good = tkn.raw.items[0..err];
const bad = tkn.raw.items[err..];
try Draw.draw(&hsh.draw, LexTree{
.siblings = @constCast(&[_]Lexeme{
.{ .char = good },
.{ .char = bad, .style = .{ .bg = .red } },
}),
try Draw.draw(&hsh.draw, &[_]Lexeme{
.{ .char = good },
.{ .char = bad, .style = .{ .bg = .red } },
});
}
 
fn prompt(d: *Draw.Drawable, u: ?[]const u8, cwd: []const u8) !void {
try Draw.draw(d, .{
.siblings = @constCast(&[_]Lexeme{
.{
.char = u orelse "[username unknown]",
.style = .{
.attr = .bold,
.fg = .blue,
},
},
.{ .char = "@" },
.{ .char = "host " },
.{ .char = cwd },
.{ .char = " $ " },
}),
try Draw.draw(d, &[_]Lexeme{
.{ .char = u orelse "[username unknown]", .style = .{ .attr = .bold, .fg = .blue } },
.{ .char = "@" },
.{ .char = "host " },
.{ .char = cwd },
.{ .char = " $ " },
});
}
 
pub fn draw(hsh: *HSH) !void {
var tkn = hsh.tkn;
pub fn draw(hsh: *HSH, line: []const u8) !void {
const bgjobs = Jobs.getBgSlice(hsh.alloc) catch unreachable;
defer hsh.alloc.free(bgjobs);
try jobsContext(hsh, bgjobs);
//try ctxContext(hsh, try Context.fetch(hsh, .git));
 
try prompt(&hsh.draw, hsh.env.get("USER"), hsh.hfs.names.cwd_short);
try userText(hsh, &tkn);
try userText(hsh, line, "");
// try drawRight(&hsh.draw, .{
// .siblings = @constCast(&[_]Lexeme{
// .{ .char = try std.fmt.bufPrint(&tokens, "({}) ({}) [{}]", .{
@@ -101,16 +87,15 @@ pub fn draw(hsh: *HSH) !void {
 
fn jobsContext(hsh: *HSH, jobs: []*Job) !void {
for (jobs) |j| {
const lex = LexTree{
.siblings = @constCast(&[_]Lexeme{
.{ .char = "[ " },
if (j.status == .background) spinner(.dots2t3) else .{ .char = "Z" },
.{ .char = " " },
.{ .char = j.name orelse "Unknown Job" },
.{ .char = " ]" },
}),
const lex = [_]Lexeme{
.{ .char = "[ " },
if (j.status == .background) spinner(.dots2t3) else .{ .char = "Z" },
.{ .char = " " },
.{ .char = j.name orelse "Unknown Job" },
.{ .char = " ]" },
};
try Draw.drawBefore(&hsh.draw, lex);
 
try Draw.drawBefore(&hsh.draw, &lex);
}
}
 
 
src/signals.zig added: 542, removed: 605, total 0
@@ -130,7 +130,8 @@ pub const SigEvent = enum {
pub fn do(hsh: *HSH) SigEvent {
while (flags.int > 0) {
flags.int -|= 1;
hsh.tkn.reset();
// TODO do something
//hsh.tkn.reset();
_ = hsh.draw.write("^C\n\r") catch {};
//if (hsh.hist) |*hist| {
// hist.cnt = 0;
 
src/tokenizer.zig added: 542, removed: 605, total 0
@@ -8,6 +8,8 @@ const mem = std.mem;
const CompOption = @import("completion.zig").CompOption;
const Token = @import("token.zig");
 
pub const Tokenizer = @This();
 
pub const TokenError = Token.Error;
 
pub const Error = error{
@@ -16,8 +18,6 @@ pub const Error = error{
OutOfMemory,
};
 
pub const TokenIterator = Token.Iterator;
 
pub const CursorMotion = enum(u8) {
home,
end,
@@ -27,350 +27,348 @@ pub const CursorMotion = enum(u8) {
dec,
};
 
pub const Tokenizer = struct {
alloc: Allocator,
raw: ArrayList(u8),
idx: usize = 0,
raw_maybe: ?[]const u8 = null,
prev_exec: ?ArrayList(u8) = null,
c_tkn: usize = 0, // cursor is over this token
err_idx: usize = 0,
user_data: bool = false,
editor_mktmp: ?[]u8 = null,
alloc: Allocator,
raw: ArrayList(u8),
idx: usize = 0,
raw_maybe: ?[]const u8 = null,
prev_exec: ?ArrayList(u8) = null,
c_tkn: usize = 0, // cursor is over this token
err_idx: usize = 0,
user_data: bool = false,
editor_mktmp: ?[]u8 = null,
 
pub fn init(a: Allocator) Tokenizer {
return Tokenizer{
.alloc = a,
.raw = ArrayList(u8).init(a),
pub fn init(a: Allocator) Tokenizer {
return Tokenizer{
.alloc = a,
.raw = ArrayList(u8).init(a),
};
}
 
fn cChar(self: *Tokenizer) ?u8 {
if (self.raw.items.len == 0) return null;
if (self.idx == self.raw.items.len) return self.raw.items[self.idx - 1];
return self.raw.items[self.idx];
}
 
fn cToBoundry(self: *Tokenizer, comptime forward: bool) void {
std.debug.assert(self.raw.items.len > 0);
const cursor = if (forward) .inc else .dec;
self.move(cursor);
 
while (std.ascii.isWhitespace(self.cChar().?) and
self.idx > 0 and
self.idx < self.raw.items.len)
{
self.move(cursor);
}
 
while (!std.ascii.isWhitespace(self.cChar().?) and
self.idx != 0 and
self.idx < self.raw.items.len)
{
self.move(cursor);
}
if (!forward and self.idx != 0) self.move(.inc);
}
 
pub fn move(self: *Tokenizer, motion: CursorMotion) void {
if (self.raw.items.len == 0) return;
switch (motion) {
.home => self.idx = 0,
.end => self.idx = self.raw.items.len,
.back => self.cToBoundry(false),
.word => self.cToBoundry(true),
.inc => self.idx +|= 1,
.dec => self.idx -|= 1,
}
self.idx = @min(self.idx, self.raw.items.len);
}
 
pub fn cursor_token(self: *Tokenizer) !Token {
var i: usize = 0;
self.c_tkn = 0;
if (self.raw.items.len == 0) return Error.Empty;
while (i < self.raw.items.len) {
const t = Token.any(self.raw.items[i..]) catch break;
if (t.str.len == 0) break;
i += t.str.len;
if (i >= self.idx) return t;
self.c_tkn += 1;
}
return Error.TokenizeFailed;
}
 
pub fn iterator(self: *Tokenizer) Token.Iterator {
return Token.Iterator{ .raw = self.raw.items };
}
 
/// Returns a Token error
pub fn validate(self: *Tokenizer) TokenError!void {
var i: usize = 0;
while (i < self.raw.items.len) {
const t = try Token.any(self.raw.items[i..]);
i += t.str.len;
}
}
 
// completion commands
 
/// remove the completion maybe from input
pub fn maybeDrop(self: *Tokenizer) !void {
if (self.raw_maybe) |rm| {
self.popRange(rm.len) catch {
log.err("Unable to drop maybe {s} len = {}\n", .{ rm, rm.len });
log.err("Unable to drop maybe {s} len = {}\n", .{ rm, rm.len });
@panic("dropMaybe");
};
}
self.maybeClear();
}
 
fn cChar(self: *Tokenizer) ?u8 {
if (self.raw.items.len == 0) return null;
if (self.idx == self.raw.items.len) return self.raw.items[self.idx - 1];
return self.raw.items[self.idx];
pub fn maybeClear(self: *Tokenizer) void {
if (self.raw_maybe) |rm| {
self.alloc.free(rm);
}
self.raw_maybe = null;
}
 
fn cToBoundry(self: *Tokenizer, comptime forward: bool) void {
std.debug.assert(self.raw.items.len > 0);
const cursor = if (forward) .inc else .dec;
self.move(cursor);
pub fn maybeDupe(self: *Tokenizer, str: []const u8) !void {
self.maybeClear();
self.raw_maybe = try self.alloc.dupe(u8, str);
}
 
while (std.ascii.isWhitespace(self.cChar().?) and
self.idx > 0 and
self.idx < self.raw.items.len)
{
self.move(cursor);
}
/// str must be safe to insert directly as is
pub fn maybeAdd(self: *Tokenizer, str: []const u8) !void {
const safe = try self.makeSafe(str) orelse try self.alloc.dupe(u8, str);
defer self.alloc.free(safe);
try self.maybeDupe(safe);
try self.consumes(safe);
}
 
while (!std.ascii.isWhitespace(self.cChar().?) and
self.idx != 0 and
self.idx < self.raw.items.len)
{
self.move(cursor);
}
if (!forward and self.idx != 0) self.move(.inc);
}
 
/// Apply one cursor motion, clamping the cursor to [0, raw.items.len].
/// No-op on an empty line. `.back`/`.word` delegate to cToBoundry.
pub fn move(self: *Tokenizer, motion: CursorMotion) void {
    if (self.raw.items.len == 0) return;
    switch (motion) {
        .home => self.idx = 0,
        .end => self.idx = self.raw.items.len,
        .back => self.cToBoundry(false),
        .word => self.cToBoundry(true),
        .inc => self.idx +|= 1, // saturating: never wraps
        .dec => self.idx -|= 1,
    }
    // .inc and boundary hops may overshoot; clamp back to the line.
    self.idx = @min(self.idx, self.raw.items.len);
}
 
/// Returns the token currently under the cursor (self.idx), updating
/// self.c_tkn to that token's ordinal position within the line.
/// Errors: Error.Empty when the line is empty; Error.TokenizeFailed when
/// tokenizing stops before the scan reaches the cursor.
pub fn cursor_token(self: *Tokenizer) !Token {
    var i: usize = 0;
    self.c_tkn = 0;
    if (self.raw.items.len == 0) return Error.Empty;
    while (i < self.raw.items.len) {
        // Tokenize from the current offset; any tokenizer error ends the scan.
        const t = Token.any(self.raw.items[i..]) catch break;
        if (t.str.len == 0) break; // zero-width token: no forward progress possible
        i += t.str.len;
        // The cursor falls within (or at the end of) this token's span.
        if (i >= self.idx) return t;
        self.c_tkn += 1;
    }
    return Error.TokenizeFailed;
}
 
/// Build a token iterator over the current line. The iterator borrows
/// raw.items directly, so any subsequent edit to the line invalidates it.
pub fn iterator(self: *Tokenizer) TokenIterator {
    return TokenIterator{ .raw = self.raw.items };
}
 
/// Returns a Token error
pub fn validate(self: *Tokenizer) TokenError!void {
var i: usize = 0;
while (i < self.raw.items.len) {
const t = try Token.any(self.raw.items[i..]);
i += t.str.len;
}
}
 
// completion commands
 
/// remove the completion maybe from input
/// Pops the previously inserted completion text (raw_maybe) back out of
/// the input buffer, then releases the stored maybe. Panics when the
/// stored length can no longer be removed, since that means the buffer
/// and the recorded maybe have gone out of sync.
pub fn maybeDrop(self: *Tokenizer) !void {
    if (self.raw_maybe) |rm| {
        self.popRange(rm.len) catch {
            // Log once (the statement was accidentally duplicated) before
            // panicking on the buffer/maybe desync.
            log.err("Unable to drop maybe {s} len = {}\n", .{ rm, rm.len });
            @panic("dropMaybe");
        };
    }
    self.maybeClear();
}
 
/// Release the stored completion "maybe", if any, and mark it absent.
pub fn maybeClear(self: *Tokenizer) void {
    if (self.raw_maybe) |owned| self.alloc.free(owned);
    self.raw_maybe = null;
}
 
/// Store a private copy of `str` as the current completion "maybe",
/// releasing any previous one first. The Tokenizer owns the duplicate.
pub fn maybeDupe(self: *Tokenizer, str: []const u8) !void {
    self.maybeClear();
    self.raw_maybe = try self.alloc.dupe(u8, str);
}
 
/// str must be safe to insert directly as is
/// Escapes `str` when needed, records the result as the current maybe,
/// and inserts it into the input buffer at the cursor.
pub fn maybeAdd(self: *Tokenizer, str: []const u8) !void {
    // makeSafe returns null when str needs no escaping; fall back to a dupe
    // so `safe` is always owned here and can be freed unconditionally.
    const safe = try self.makeSafe(str) orelse try self.alloc.dupe(u8, str);
    defer self.alloc.free(safe);
    try self.maybeDupe(safe);
    try self.consumes(safe);
}
 
/// This function edits user text, so extra care must be taken to ensure
/// it's something the user asked for!
pub fn maybeReplace(self: *Tokenizer, new: *const CompOption) !void {
const str = try self.makeSafe(new.str) orelse try self.alloc.dupe(u8, new.str);
defer self.alloc.free(str);
if (self.raw_maybe) |_| {
try self.maybeDrop();
} else if (new.kind == null) {
try self.maybeDupe(str);
}
 
if (new.kind == null) return;
/// This function edits user text, so extra care must be taken to ensure
/// it's something the user asked for!
pub fn maybeReplace(self: *Tokenizer, new: *const CompOption) !void {
const str = try self.makeSafe(new.str) orelse try self.alloc.dupe(u8, new.str);
defer self.alloc.free(str);
if (self.raw_maybe) |_| {
try self.maybeDrop();
} else if (new.kind == null) {
try self.maybeDupe(str);
 
try self.consumes(str);
}
 
pub fn maybeCommit(self: *Tokenizer, new: ?*const CompOption) !void {
self.maybeClear();
if (new) |n| {
switch (n.kind.?) {
.file_system => |f_s| {
switch (f_s) {
.dir => try self.consumec('/'),
.file, .link, .pipe => try self.consumec(' '),
else => {},
}
},
.path_exe => try self.consumec(' '),
else => {},
}
if (new.kind == null) return;
try self.maybeDupe(str);
 
try self.consumes(str);
}
 
/// Finalize a completion: forget the stored maybe and, when an option was
/// chosen, append the character that naturally follows it ('/' after a
/// directory, ' ' after files/links/pipes and path executables).
/// Note: `n.kind.?` asserts the chosen option has a kind set.
pub fn maybeCommit(self: *Tokenizer, new: ?*const CompOption) !void {
    self.maybeClear();
    if (new) |n| {
        switch (n.kind.?) {
            .file_system => |f_s| {
                switch (f_s) {
                    .dir => try self.consumec('/'),
                    .file, .link, .pipe => try self.consumec(' '),
                    else => {},
                }
            },
            .path_exe => try self.consumec(' '),
            else => {},
        }
    }
}
 
/// if returned value is null, string is already safe.
fn makeSafe(self: *Tokenizer, str: []const u8) !?[]u8 {
if (mem.indexOfAny(u8, str, Token.BREAKING_TOKENS)) |_| {} else {
return null;
}
var extra: usize = str.len;
var look = [1]u8{0};
for (Token.BREAKING_TOKENS) |t| {
look[0] = t;
extra += mem.count(u8, str, &look);
}
std.debug.assert(extra > str.len);
/// if returned value is null, string is already safe.
fn makeSafe(self: *Tokenizer, str: []const u8) !?[]u8 {
if (mem.indexOfAny(u8, str, Token.BREAKING_TOKENS)) |_| {} else {
return null;
}
var extra: usize = str.len;
var look = [1]u8{0};
for (Token.BREAKING_TOKENS) |t| {
look[0] = t;
extra += mem.count(u8, str, &look);
}
std.debug.assert(extra > str.len);
 
var safer = try self.alloc.alloc(u8, extra);
var i: usize = 0;
for (str) |c| {
if (mem.indexOfScalar(u8, Token.BREAKING_TOKENS, c)) |_| {
safer[i] = '\\';
i += 1;
}
safer[i] = c;
var safer = try self.alloc.alloc(u8, extra);
var i: usize = 0;
for (str) |c| {
if (mem.indexOfScalar(u8, Token.BREAKING_TOKENS, c)) |_| {
safer[i] = '\\';
i += 1;
}
return safer;
safer[i] = c;
i += 1;
}
return safer;
}
 
fn dropWhitespace(self: *Tokenizer) Error!usize {
if (self.idx == 0 or !std.ascii.isWhitespace(self.raw.items[self.idx - 1])) {
return 0;
}
var count: usize = 1;
fn dropWhitespace(self: *Tokenizer) Error!usize {
if (self.idx == 0 or !std.ascii.isWhitespace(self.raw.items[self.idx - 1])) {
return 0;
}
var count: usize = 1;
self.idx -|= 1;
var c = self.raw.orderedRemove(@intCast(self.idx));
while (self.idx > 0 and std.ascii.isWhitespace(c)) {
self.idx -|= 1;
var c = self.raw.orderedRemove(@intCast(self.idx));
while (self.idx > 0 and std.ascii.isWhitespace(c)) {
self.idx -|= 1;
c = self.raw.orderedRemove(@intCast(self.idx));
count +|= 1;
}
if (!std.ascii.isWhitespace(c)) {
try self.consumec(c);
count -|= 1;
}
return count;
c = self.raw.orderedRemove(@intCast(self.idx));
count +|= 1;
}
if (!std.ascii.isWhitespace(c)) {
try self.consumec(c);
count -|= 1;
}
return count;
}
 
fn dropAlphanum(self: *Tokenizer) Error!usize {
if (self.idx == 0 or !std.ascii.isAlphanumeric(self.raw.items[self.idx - 1])) {
return 0;
}
var count: usize = 1;
fn dropAlphanum(self: *Tokenizer) Error!usize {
if (self.idx == 0 or !std.ascii.isAlphanumeric(self.raw.items[self.idx - 1])) {
return 0;
}
var count: usize = 1;
self.idx -|= 1;
var c = self.raw.orderedRemove(@intCast(self.idx));
while (self.idx > 0 and (c == '-' or std.ascii.isAlphanumeric(c))) {
self.idx -|= 1;
var c = self.raw.orderedRemove(@intCast(self.idx));
while (self.idx > 0 and (c == '-' or std.ascii.isAlphanumeric(c))) {
self.idx -|= 1;
c = self.raw.orderedRemove(@intCast(self.idx));
count +|= 1;
}
if (!std.ascii.isAlphanumeric(c)) {
try self.consumec(c);
count -|= 1;
}
return count;
c = self.raw.orderedRemove(@intCast(self.idx));
count +|= 1;
}
if (!std.ascii.isAlphanumeric(c)) {
try self.consumec(c);
count -|= 1;
}
return count;
}
 
// this clearly needs a bit more love
pub fn dropWord(self: *Tokenizer) Error!usize {
if (self.raw.items.len == 0 or self.idx == 0) return 0;
// this clearly needs a bit more love
pub fn dropWord(self: *Tokenizer) Error!usize {
if (self.raw.items.len == 0 or self.idx == 0) return 0;
 
var count = try self.dropWhitespace();
var wd = try self.dropAlphanum();
var count = try self.dropWhitespace();
var wd = try self.dropAlphanum();
if (wd > 0) {
count += wd;
wd = try self.dropWhitespace();
count += wd;
if (wd > 0) {
count += wd;
wd = try self.dropWhitespace();
count += wd;
if (wd > 0) {
try self.consumec(' ');
count -|= 1;
}
try self.consumec(' ');
count -|= 1;
}
if (count == 0 and self.raw.items.len > 0 and self.idx != 0) {
self.pop();
return 1 + try self.dropWord();
}
return count;
}
if (count == 0 and self.raw.items.len > 0 and self.idx != 0) {
self.pop();
return 1 + try self.dropWord();
}
return count;
}
 
pub fn pop(self: *Tokenizer) void {
self.user_data = true;
if (self.raw.items.len == 0 or self.idx == 0) return;
if (self.idx < self.raw.items.len) {
self.idx -|= 1;
_ = self.raw.orderedRemove(self.idx);
return;
}
 
pub fn pop(self: *Tokenizer) void {
self.user_data = true;
if (self.raw.items.len == 0 or self.idx == 0) return;
if (self.idx < self.raw.items.len) {
self.idx -|= 1;
self.raw.items.len -|= 1;
self.err_idx = @min(self.idx, self.err_idx);
}
 
pub fn delc(self: *Tokenizer) void {
if (self.raw.items.len == 0 or self.idx == self.raw.items.len) return;
self.user_data = true;
_ = self.raw.orderedRemove(self.idx);
return;
}
 
pub fn popRange(self: *Tokenizer, count: usize) Error!void {
if (count == 0) return;
if (self.raw.items.len == 0 or self.idx == 0) return;
if (count > self.raw.items.len) return Error.Empty;
self.user_data = true;
self.idx -|= count;
_ = self.raw.replaceRange(@as(usize, self.idx), count, "") catch unreachable;
// replaceRange is able to expand, but we don't here, thus unreachable
self.err_idx = @min(self.idx, self.err_idx);
}
self.idx -|= 1;
self.raw.items.len -|= 1;
self.err_idx = @min(self.idx, self.err_idx);
}
 
/// consumes(tring) will swallow exec, assuming strings shouldn't be able to
/// start execution
pub fn consumes(self: *Tokenizer, str: []const u8) Error!void {
for (str) |s| {
self.consumec(s) catch |e| {
if (e == Error.Exec) continue;
return e;
};
}
}
/// Delete the character under the cursor (forward delete). No-op when the
/// buffer is empty or the cursor sits past the last character.
pub fn delc(self: *Tokenizer) void {
    if (self.raw.items.len == 0 or self.idx == self.raw.items.len) return;
    self.user_data = true;
    _ = self.raw.orderedRemove(self.idx);
}
 
pub fn consumec(self: *Tokenizer, c: u8) Error!void {
try self.raw.insert(self.idx, @bitCast(c));
self.idx += 1;
self.user_data = true;
if (c == '\n' and self.idx == self.raw.items.len) {
if (self.raw.items.len > 1 and self.raw.items[self.raw.items.len - 2] != '\\')
return Error.Exec;
} else if (c == '\n') {
// I'd like to give this some more thought, but I'm tired of this bug *now*
return error.Exec;
}
}
/// Remove `count` characters ending at the cursor, moving the cursor back
/// by the same amount (saturating at 0). No-op for count == 0, an empty
/// buffer, or a cursor already at the start.
/// Errors: Error.Empty when count exceeds the buffer length.
pub fn popRange(self: *Tokenizer, count: usize) Error!void {
    if (count == 0) return;
    if (self.raw.items.len == 0 or self.idx == 0) return;
    if (count > self.raw.items.len) return Error.Empty;
    self.user_data = true;
    self.idx -|= count;
    _ = self.raw.replaceRange(@as(usize, self.idx), count, "") catch unreachable;
    // replaceRange is able to expand, but we don't here, thus unreachable
    self.err_idx = @min(self.idx, self.err_idx);
}
 
// TODO rename verbNoun -> lineVerb
 
pub fn lineReplaceHistory(self: *Tokenizer) *ArrayList(u8) {
self.resetRaw();
return &self.raw;
/// consumes(tring) will swallow exec, assuming strings shouldn't be able to
/// start execution
pub fn consumes(self: *Tokenizer, str: []const u8) Error!void {
    // Insert each character at the cursor; Error.Exec from consumec is
    // deliberately ignored, every other error propagates.
    for (str) |c| {
        self.consumec(c) catch |err| switch (err) {
            Error.Exec => {},
            else => return err,
        };
    }
}
 
pub fn saveLine(self: *Tokenizer) void {
self.raw = ArrayList(u8).init(self.alloc);
self.idx = 0;
self.user_data = false;
/// Insert a single character at the cursor and advance the cursor.
/// Returns Error.Exec when the character is a newline that should trigger
/// execution of the line:
///   - at the very end of the line, unless escaped by a trailing '\';
///   - anywhere mid-line (e.g. a pasted newline), unconditionally.
pub fn consumec(self: *Tokenizer, c: u8) Error!void {
    try self.raw.insert(self.idx, @bitCast(c));
    self.idx += 1;
    self.user_data = true;
    if (c == '\n' and self.idx == self.raw.items.len) {
        // Newline at end of line: execute unless the previous char is a
        // '\' line continuation.
        if (self.raw.items.len > 1 and self.raw.items[self.raw.items.len - 2] != '\\')
            return Error.Exec;
    } else if (c == '\n') {
        // I'd like to give this some more thought, but I'm tired of this bug *now*
        return error.Exec;
    }
}
 
pub fn restoreLine(self: *Tokenizer) void {
self.resetRaw();
self.user_data = true;
self.idx = self.raw.items.len;
}
// TODO rename verbNoun -> lineVerb
 
pub fn reset(self: *Tokenizer) void {
self.resetRaw();
self.resetPrevExec();
}
/// Clear the current line and hand back the raw buffer so history text can
/// be written into it directly by the caller.
pub fn lineReplaceHistory(self: *Tokenizer) *ArrayList(u8) {
    self.resetRaw();
    return &self.raw;
}
 
fn resetRaw(self: *Tokenizer) void {
self.raw.clearRetainingCapacity();
self.idx = 0;
self.err_idx = 0;
self.c_tkn = 0;
self.user_data = false;
self.maybeClear();
}
/// Detach the current line by starting a fresh, empty buffer.
/// NOTE(review): the previous ArrayList is neither freed nor stored here —
/// presumably the caller takes ownership of it first; verify at call sites.
pub fn saveLine(self: *Tokenizer) void {
    self.raw = ArrayList(u8).init(self.alloc);
    self.idx = 0;
    self.user_data = false;
}
 
fn resetPrevExec(self: *Tokenizer) void {
if (self.prev_exec) |*pr| pr.clearAndFree();
}
/// Re-adopt the current raw buffer as user input with the cursor at its
/// end. NOTE(review): resetRaw() clears the buffer first, so idx ends up 0
/// unless the caller swapped the buffer in beforehand — confirm intent.
pub fn restoreLine(self: *Tokenizer) void {
    self.resetRaw();
    self.user_data = true;
    self.idx = self.raw.items.len;
}
 
/// Doesn't exec, called to save previous "local" command
pub fn exec(self: *Tokenizer) void {
if (self.prev_exec) |*pr| pr.clearAndFree();
self.prev_exec = self.raw;
self.raw = ArrayList(u8).init(self.alloc);
self.resetRaw();
}
/// Reset both the editable line and the saved previous-exec buffer.
pub fn reset(self: *Tokenizer) void {
    self.resetRaw();
    self.resetPrevExec();
}
 
pub fn raze(self: *Tokenizer) void {
self.reset();
self.raw.clearAndFree();
}
};
/// Clear the line buffer (keeping its capacity for reuse) along with all
/// cursor/diagnostic state and any stored completion maybe.
fn resetRaw(self: *Tokenizer) void {
    self.raw.clearRetainingCapacity();
    self.idx = 0;
    self.err_idx = 0;
    self.c_tkn = 0;
    self.user_data = false;
    self.maybeClear();
}
 
/// Free the saved previous-exec line, if one exists.
fn resetPrevExec(self: *Tokenizer) void {
    if (self.prev_exec) |*pr| pr.clearAndFree();
}
 
/// Doesn't exec, called to save previous "local" command
/// Moves the current line into prev_exec (freeing whatever was stored
/// there) and starts a fresh, empty line for the next input.
pub fn exec(self: *Tokenizer) void {
    if (self.prev_exec) |*pr| pr.clearAndFree();
    self.prev_exec = self.raw;
    self.raw = ArrayList(u8).init(self.alloc);
    self.resetRaw();
}
 
/// Tear down the tokenizer's owned buffers; used at shutdown.
pub fn raze(self: *Tokenizer) void {
    self.reset();
    self.raw.clearAndFree();
}
 
const expect = std.testing.expect;
const expectEql = std.testing.expectEqual;
@@ -559,7 +557,7 @@ test "breaking" {
}
 
test "tokeniterator 0" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "one two three",
};
 
@@ -572,7 +570,7 @@ test "tokeniterator 0" {
}
 
test "tokeniterator 1" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "one two three",
};
 
@@ -585,7 +583,7 @@ test "tokeniterator 1" {
}
 
test "tokeniterator 2" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "one two three",
};
 
@@ -596,7 +594,7 @@ test "tokeniterator 2" {
}
 
test "tokeniterator 3" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "one two three",
};
 
@@ -609,7 +607,7 @@ test "tokeniterator 3" {
}
 
test "token pipeline" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls -la | cat | sort ; echo this works",
};
 
@@ -646,7 +644,7 @@ test "token pipeline" {
}
 
test "token pipeline slice" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls -la | cat | sort ; echo this works",
};
 
@@ -686,7 +684,7 @@ test "token pipeline slice" {
}
 
test "token pipeline slice safe with next()" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls -la | cat | sort ; echo this works",
};
 
@@ -732,7 +730,7 @@ test "token pipeline slice safe with next()" {
}
 
test "token > file" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls > file.txt",
};
 
@@ -750,7 +748,7 @@ test "token > file" {
}
 
test "token > file extra ws" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls > file.txt",
};
 
@@ -766,7 +764,7 @@ test "token > file extra ws" {
}
 
test "token > execSlice" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls > file.txt",
};
 
@@ -793,7 +791,7 @@ test "token > execSlice" {
}
 
test "token >> file" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls >> file.txt",
};
 
@@ -808,7 +806,7 @@ test "token >> file" {
var iot = ti.next().?;
try eqlStr("file.txt", iot.cannon());
try std.testing.expect(iot.kind.io == .Append);
ti = TokenIterator{ .raw = "ls >>file.txt" };
ti = Token.Iterator{ .raw = "ls >>file.txt" };
try eqlStr("ls", ti.first().cannon());
ti.skip();
iot = ti.next().?;
@@ -817,7 +815,7 @@ test "token >> file" {
}
 
test "token < file" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls < file.txt",
};
 
@@ -836,7 +834,7 @@ test "token < file" {
}
 
test "token < file extra ws" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls < file.txt",
};
 
@@ -852,7 +850,7 @@ test "token < file extra ws" {
}
 
test "token &&" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls && success",
};
 
@@ -873,7 +871,7 @@ test "token &&" {
}
 
test "token ||" {
var ti = TokenIterator{
var ti = Token.Iterator{
.raw = "ls || fail",
};
 
@@ -938,7 +936,7 @@ test "token vari braces" {
t = try Token.any("${STR_ING}extra");
try eqlStr("STR_ING", t.cannon());
 
var itr = TokenIterator{ .raw = "${STR_ING}extra" };
var itr = Token.Iterator{ .raw = "${STR_ING}extra" };
var count: usize = 0;
while (itr.next()) |_| count += 1;
try expectEql(count, 2);
@@ -951,7 +949,7 @@ test "dollar posix" {
}
 
test "all execs" {
var tt = TokenIterator{ .raw = "ls -with -some -params && files || thing | pipeline ; othercmd & screenshot && some/rel/exec" };
var tt = Token.Iterator{ .raw = "ls -with -some -params && files || thing | pipeline ; othercmd & screenshot && some/rel/exec" };
var count: usize = 0;
while (tt.next()) |_| {
while (tt.nextExec()) |_| {}
@@ -1064,7 +1062,7 @@ test "inline quotes" {
var t = try Token.any("--inline='quoted string'");
try std.testing.expectEqualStrings("--inline=", t.cannon());
 
var itr = TokenIterator{ .raw = "--inline='quoted string'" };
var itr = Token.Iterator{ .raw = "--inline='quoted string'" };
try eqlStr("--inline=", itr.next().?.cannon());
try eqlStr("quoted string", itr.next().?.cannon());
}
@@ -1136,7 +1134,7 @@ test "comment" {
try std.testing.expectEqualStrings("# comment", tk.str);
try std.testing.expectEqualStrings("", tk.cannon());
 
var itr = TokenIterator{ .raw = " echo #comment" };
var itr = Token.Iterator{ .raw = " echo #comment" };
 
itr.skip();
try std.testing.expectEqualStrings("echo", itr.next().?.cannon());
@@ -1144,7 +1142,7 @@ test "comment" {
try std.testing.expectEqualStrings("", itr.next().?.cannon());
try std.testing.expect(null == itr.next());
 
itr = TokenIterator{ .raw = " echo #comment\ncd home" };
itr = Token.Iterator{ .raw = " echo #comment\ncd home" };
 
itr.skip();
try std.testing.expectEqualStrings("echo", itr.next().?.cannon());
@@ -1157,7 +1155,7 @@ test "comment" {
}
 
test "backslash" {
var itr = TokenIterator{ .raw = "this\\ is some text" };
var itr = Token.Iterator{ .raw = "this\\ is some text" };
 
var count: usize = 0;
while (itr.next()) |_| {
@@ -1325,7 +1323,7 @@ test "nested logic" {
test "naughty strings" {
const while_str = "thingy (b.argv.next()) |_| {}";
 
var itr = TokenIterator{ .raw = while_str };
var itr = Token.Iterator{ .raw = while_str };
 
var count: usize = 0;
while (itr.next()) |t| {